[ 468.973459] env[61852]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_linux_bridge.linux_bridge.LinuxBridgePlugin'>' with name 'linux_bridge' {{(pid=61852) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 468.973797] env[61852]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_noop.noop.NoOpPlugin'>' with name 'noop' {{(pid=61852) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 468.973845] env[61852]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_ovs.ovs.OvsPlugin'>' with name 'ovs' {{(pid=61852) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 468.974198] env[61852]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 469.083616] env[61852]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=61852) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 469.094913] env[61852]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.011s {{(pid=61852) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 469.749763] env[61852]: INFO nova.virt.driver [None req-df08c5ee-e1ba-452f-8ee4-e7f83fafcdf0 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 469.861818] env[61852]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 469.862000] env[61852]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 469.862088] env[61852]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=61852) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 472.913592] env[61852]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-49adbf75-e2e8-484d-8b11-3a5fe21dfe2d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 472.929684] env[61852]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=61852) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 472.929684] env[61852]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-96895b14-11fd-4705-83f7-523d43606174 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 472.964699] env[61852]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 953b0.
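The paired oslo_concurrency.processutils records above ("Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm" followed by "CMD ... returned: 0 in 0.011s") are the library's standard run-and-time pattern; the grep probes whether the installed iscsiadm supports manual session scanning. A minimal sketch of that pattern using only the public oslo.concurrency API; the call site and the check_exit_code choice are illustrative, not the exact connector code:

    from oslo_concurrency import processutils

    # Logs "Running cmd (subprocess): ..." and then "CMD ... returned: <rc> in <t>s"
    # exactly like the records above. grep exits 1 when the string is absent, so
    # both 0 and 1 are treated as success here (an assumption for this sketch).
    out, err = processutils.execute(
        'grep', '-F', 'node.session.scan', '/sbin/iscsiadm',
        check_exit_code=[0, 1])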
[ 472.964856] env[61852]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.103s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 472.965436] env[61852]: INFO nova.virt.vmwareapi.driver [None req-df08c5ee-e1ba-452f-8ee4-e7f83fafcdf0 None None] VMware vCenter version: 7.0.3
[ 472.968861] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6216c624-798a-4402-9145-0ef0fe219293 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 472.986149] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cc442f3-a871-46f8-a929-170ee5943a28 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 472.991874] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e7c978a-19e0-49da-b981-d7591a090690 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 472.998250] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-899de73d-0a70-4c42-b2ad-f773fe269865 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 473.010933] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c4d208-0a73-4fcc-acb8-1699693059ad {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 473.016757] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-370b5e19-afc9-44c0-9ac6-cd1fb2b23faa {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 473.046637] env[61852]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-d4024e9f-ae3b-447d-840a-e7f61f161d34 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 473.051428] env[61852]: DEBUG nova.virt.vmwareapi.driver [None req-df08c5ee-e1ba-452f-8ee4-e7f83fafcdf0 None None] Extension org.openstack.compute already exists. {{(pid=61852) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:225}}
[ 473.054116] env[61852]: INFO nova.compute.provider_config [None req-df08c5ee-e1ba-452f-8ee4-e7f83fafcdf0 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
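The Acquiring / acquired / released triplet around VMwareAPISession._create_session above is oslo.concurrency's lockutils wrapper logging wait and hold times (the session lock was held 3.103s while logging into vc1.osci.c.eu-de-1.cloud.sap). A minimal sketch of the pattern, with the lock name taken from the log and the decorated function purely illustrative:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('oslo_vmware_api_lock')
    def _create_session():
        # Only one greenthread at a time may (re)build the vCenter session;
        # the synchronized wrapper emits the "Acquiring lock ... by ...",
        # "acquired ... waited Ns", and "released ... held Ns" records above.
        ...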
[ 473.558070] env[61852]: DEBUG nova.context [None req-df08c5ee-e1ba-452f-8ee4-e7f83fafcdf0 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),925cd574-c3ed-4a5d-be67-1408562e5a9c(cell1) {{(pid=61852) load_cells /opt/stack/nova/nova/context.py:464}}
[ 473.560143] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 473.560367] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 473.561092] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 473.561522] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] Acquiring lock "925cd574-c3ed-4a5d-be67-1408562e5a9c" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 473.561712] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] Lock "925cd574-c3ed-4a5d-be67-1408562e5a9c" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 473.562736] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] Lock "925cd574-c3ed-4a5d-be67-1408562e5a9c" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 473.583405] env[61852]: INFO dbcounter [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] Registered counter for database nova_cell0
[ 473.591471] env[61852]: INFO dbcounter [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] Registered counter for database nova_cell1
[ 473.595222] env[61852]: DEBUG oslo_db.sqlalchemy.engines [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61852) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 473.595572] env[61852]: DEBUG oslo_db.sqlalchemy.engines [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61852) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 473.604012] env[61852]: ERROR nova.db.main.api [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 473.604012] env[61852]: result = function(*args, **kwargs)
[ 473.604012] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 473.604012] env[61852]: return func(*args, **kwargs)
[ 473.604012] env[61852]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 473.604012] env[61852]: result = fn(*args, **kwargs)
[ 473.604012] env[61852]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 473.604012] env[61852]: return f(*args, **kwargs)
[ 473.604012] env[61852]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 473.604012] env[61852]: return db.service_get_minimum_version(context, binaries)
[ 473.604012] env[61852]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 473.604012] env[61852]: _check_db_access()
[ 473.604012] env[61852]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 473.604012] env[61852]: stacktrace = ''.join(traceback.format_stack())
[ 473.604012] env[61852]: 
[ 473.604012] env[61852]: ERROR nova.db.main.api [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 473.604012] env[61852]: result = function(*args, **kwargs)
[ 473.604012] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 473.604012] env[61852]: return func(*args, **kwargs)
[ 473.604012] env[61852]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 473.604012] env[61852]: result = fn(*args, **kwargs)
[ 473.604012] env[61852]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 473.604012] env[61852]: return f(*args, **kwargs)
[ 473.604012] env[61852]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 473.604012] env[61852]: return db.service_get_minimum_version(context, binaries)
[ 473.604012] env[61852]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 473.604012] env[61852]: _check_db_access()
[ 473.604012] env[61852]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 473.604012] env[61852]: stacktrace = ''.join(traceback.format_stack())
[ 473.604012] env[61852]: 
[ 473.604751] env[61852]: WARNING nova.objects.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] Failed to get minimum service version for cell 925cd574-c3ed-4a5d-be67-1408562e5a9c
[ 473.604751] env[61852]: WARNING nova.objects.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 473.604751] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] Acquiring lock "singleton_lock" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 473.604751] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] Acquired lock "singleton_lock" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
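The two ERROR blocks above are nova's DB-access guard firing once per cell: nova-compute is forbidden direct database access, so the startup attempt to read the minimum service version reaches _check_db_access() (nova/db/main/api.py:188 in this trace), which dumps the offending call stack and aborts the query, producing the two "Failed to get minimum service version" warnings. A simplified reconstruction of the guard, grounded in the trace above; RuntimeError stands in for nova's actual DBNotAllowed exception:

    import logging
    import traceback

    LOG = logging.getLogger(__name__)
    DISABLE_DB_ACCESS = True  # set when the process starts as nova-compute

    def _check_db_access():
        # Capture and log the caller's full stack, then refuse the DB call;
        # the caller handles the error and logs the WARNING records above.
        if DISABLE_DB_ACCESS:
            stacktrace = ''.join(traceback.format_stack())
            LOG.error('No DB access allowed in nova-compute: %s', stacktrace)
            raise RuntimeError('DB access blocked in nova-compute')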
[ 473.604751] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] Releasing lock "singleton_lock" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 473.604751] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] Full set of CONF: {{(pid=61852) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}}
[ 473.604751] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ******************************************************************************** {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}}
[ 473.604751] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] Configuration options gathered from: {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}}
[ 473.604960] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2808}}
[ 473.604960] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}}
[ 473.604960] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ================================================================================ {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2811}}
[ 473.604960] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] allow_resize_to_same_host = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.604960] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] arq_binding_timeout = 300 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.604960] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] backdoor_port = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.605146] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] backdoor_socket = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.605146] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] block_device_allocate_retries = 60 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.605146] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] block_device_allocate_retries_interval = 3 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.605146] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cert = self.pem {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.605146] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.605146] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] compute_monitors = [] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.605146] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] config_dir = [] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.605374] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] config_drive_format = iso9660 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.605374] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.605684] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] config_source = [] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.605684] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] console_host = devstack {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.605830] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] control_exchange = nova {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.605981] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cpu_allocation_ratio = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.606163] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] daemon = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.606329] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] debug = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.606481] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] default_access_ip_network_name = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.606641] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] default_availability_zone = nova {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.606788] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] default_ephemeral_format = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.606949] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] default_green_pool_size = 1000 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.607214] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.607377] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] default_schedule_zone = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.607532] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] disk_allocation_ratio = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.607684] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] enable_new_services = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.607885] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] enabled_apis = ['osapi_compute'] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.608059] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] enabled_ssl_apis = [] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.608218] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] flat_injected = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.608373] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] force_config_drive = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.608526] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] force_raw_images = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.610011] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] graceful_shutdown_timeout = 5 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.610011] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] heal_instance_info_cache_interval = 60 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.610011] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] host = cpu-1 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.610011] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.610011] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] initial_disk_allocation_ratio = 1.0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.610011] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] initial_ram_allocation_ratio = 1.0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.610011] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.610228] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] instance_build_timeout = 0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.610228] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] instance_delete_interval = 300 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.610364] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] instance_format = [instance: %(uuid)s] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.610514] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] instance_name_template = instance-%08x {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.610668] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] instance_usage_audit = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.610834] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] instance_usage_audit_period = month {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.611025] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.611203] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] instances_path = /opt/stack/data/nova/instances {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.611365] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] internal_service_availability_zone = internal {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.611516] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] key = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.611670] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] live_migration_retry_count = 30 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.611833] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] log_color = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.612020] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] log_config_append = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.612196] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.612352] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] log_dir = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.612505] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] log_file = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.612630] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] log_options = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.612786] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] log_rotate_interval = 1 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.612959] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] log_rotate_interval_type = days {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.613136] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] log_rotation_type = none {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.613264] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.613384] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.615012] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.615012] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.615012] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.615012] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] long_rpc_timeout = 1800 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.615012] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] max_concurrent_builds = 10 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.615012] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] max_concurrent_live_migrations = 1 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.615210] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] max_concurrent_snapshots = 5 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.615210] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] max_local_block_devices = 3 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.615210] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] max_logfile_count = 30 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.615210] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] max_logfile_size_mb = 200 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.615210] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] maximum_instance_delete_attempts = 5 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.615335] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] metadata_listen = 0.0.0.0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.615454] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] metadata_listen_port = 8775 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.615544] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] metadata_workers = 2 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.615698] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] migrate_max_retries = -1 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.615856] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] mkisofs_cmd = genisoimage {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.616079] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] my_block_storage_ip = 10.180.1.21 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.619014] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] my_ip = 10.180.1.21 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.619014] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] network_allocate_retries = 0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.619014] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.619014] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] osapi_compute_listen = 0.0.0.0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.619014] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] osapi_compute_listen_port = 8774 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.619014] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] osapi_compute_unique_server_name_scope = {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.619014] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] osapi_compute_workers = 2 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.619240] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] password_length = 12 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.619240] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] periodic_enable = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.619240] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] periodic_fuzzy_delay = 60 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.619240] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] pointer_model = usbtablet {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.619240] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] preallocate_images = none {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.619240] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] publish_errors = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.619240] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] pybasedir = /opt/stack/nova {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.619412] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ram_allocation_ratio = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.619412] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] rate_limit_burst = 0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.619412] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] rate_limit_except_level = CRITICAL {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.619412] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] rate_limit_interval = 0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.619412] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] reboot_timeout = 0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.619412] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] reclaim_instance_interval = 0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.619412] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] record = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.619578] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] reimage_timeout_per_gb = 60 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.619734] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] report_interval = 120 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.619816] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] rescue_timeout = 0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.619970] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] reserved_host_cpus = 0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.620135] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] reserved_host_disk_mb = 0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.620288] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] reserved_host_memory_mb = 512 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.620443] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] reserved_huge_pages = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.620594] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] resize_confirm_window = 0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.620747] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] resize_fs_using_block_device = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.620900] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] resume_guests_state_on_host_boot = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.621114] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.621281] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] rpc_response_timeout = 60 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.621433] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] run_external_periodic_tasks = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.621595] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] running_deleted_instance_action = reap {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.621748] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] running_deleted_instance_poll_interval = 1800 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.621902] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] running_deleted_instance_timeout = 0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.622126] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] scheduler_instance_sync_interval = 120 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.622296] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] service_down_time = 720 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.622457] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] servicegroup_driver = db {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.622606] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] shell_completion = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.622760] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] shelved_offload_time = 0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.622911] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] shelved_poll_interval = 3600 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.623095] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] shutdown_timeout = 0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.623258] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] source_is_ipv6 = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.623413] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ssl_only = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.623653] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.625021] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] sync_power_state_interval = 600 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.625021] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] sync_power_state_pool_size = 1000 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.625021] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] syslog_log_facility = LOG_USER {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.625021] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] tempdir = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.625021] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] timeout_nbd = 10 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.625021] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] transport_url = **** {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.625021] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] update_resources_interval = 0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.625233] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] use_cow_images = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.625233] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] use_eventlog = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.625233] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] use_journal = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.625377] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] use_json = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.625506] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] use_rootwrap_daemon = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.625655] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] use_stderr = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.625803] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] use_syslog = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.625950] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vcpu_pin_set = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.626121] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vif_plugging_is_fatal = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.626282] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vif_plugging_timeout = 300 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.626437] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] virt_mkfs = [] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.626589] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] volume_usage_poll_interval = 0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.626740] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] watch_log_file = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.626896] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] web = /usr/share/spice-html5 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 473.627088] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.627251] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.627409] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.627582] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_concurrency.disable_process_locking = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.628166] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.628354] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.628522] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.630013] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.630013] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.630013] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.630013] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api.auth_strategy = keystone {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.630013] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api.compute_link_prefix = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.630013] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.630013] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api.dhcp_domain = novalocal {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.630231] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api.enable_instance_password = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.630231] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api.glance_link_prefix = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.630358] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.630474] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.630636] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api.instance_list_per_project_cells = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.630793] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api.list_records_by_skipping_down_cells = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.630981] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api.local_metadata_per_cell = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.634018] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api.max_limit = 1000 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.634018] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api.metadata_cache_expiration = 15 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.634018] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api.neutron_default_tenant_id = default {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.634018] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api.response_validation = warn {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.634018] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api.use_neutron_default_nets = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.634018] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.634018] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.634231] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.634231] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.634231] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api.vendordata_dynamic_targets = [] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.634231] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api.vendordata_jsonfile_path = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.634231] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.634231] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.backend = dogpile.cache.memcached {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.634231] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.backend_argument = **** {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.634407] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.config_prefix = cache.oslo {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.634407] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.dead_timeout = 60.0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.634407] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.debug_cache_backend = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.634407] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.enable_retry_client = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.634407] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.enable_socket_keepalive = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.634532] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.enabled = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.634682] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.enforce_fips_mode = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.634734] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.expiration_time = 600 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.634892] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.hashclient_retry_attempts = 2 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.635062] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.hashclient_retry_delay = 1.0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.636343] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.memcache_dead_retry = 300 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.636343] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.memcache_password = **** {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.636343] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.636343] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.636343] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.memcache_pool_maxsize = 10 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.636343] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.636343] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.memcache_sasl_enabled = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.636879] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.636879] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.memcache_socket_timeout = 1.0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.636879] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.memcache_username = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.636879] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.proxies = [] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 473.637096] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.redis_db = 0 {{(pid=61852) log_opt_values
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.637126] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.redis_password = **** {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.637290] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.redis_sentinel_service_name = mymaster {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.637464] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.637630] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.redis_server = localhost:6379 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.637791] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.redis_socket_timeout = 1.0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.637951] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.redis_username = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.638176] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.retry_attempts = 2 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.638300] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.retry_delay = 0.0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.638456] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.socket_keepalive_count = 1 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.638617] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.socket_keepalive_idle = 1 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.638776] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.socket_keepalive_interval = 1 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.638946] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.tls_allowed_ciphers = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.639112] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.tls_cafile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.639271] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.tls_certfile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
473.639433] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.tls_enabled = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.639588] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cache.tls_keyfile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.639835] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cinder.auth_section = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.640046] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cinder.auth_type = password {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.640223] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cinder.cafile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.640403] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cinder.catalog_info = volumev3::publicURL {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.640567] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cinder.certfile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.640729] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cinder.collect_timing = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.640893] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cinder.cross_az_attach = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.641092] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cinder.debug = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.641263] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cinder.endpoint_template = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.641430] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cinder.http_retries = 3 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.641591] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cinder.insecure = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.641750] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cinder.keyfile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.641933] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cinder.os_region_name = RegionOne 
{{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.642131] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cinder.split_loggers = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.642300] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cinder.timeout = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.642471] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.642647] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] compute.cpu_dedicated_set = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.642877] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] compute.cpu_shared_set = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.643077] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] compute.image_type_exclude_list = [] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.643251] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.643416] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] compute.max_concurrent_disk_ops = 0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.643578] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] compute.max_disk_devices_to_attach = -1 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.643740] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.643910] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.644084] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] compute.resource_provider_association_refresh = 300 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.644253] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.644415] env[61852]: DEBUG oslo_service.service [None 
req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] compute.shutdown_retry_interval = 10 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.644594] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.644772] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] conductor.workers = 2 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.644951] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] console.allowed_origins = [] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.645142] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] console.ssl_ciphers = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.645320] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] console.ssl_minimum_version = default {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.645488] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] consoleauth.enforce_session_timeout = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.645657] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] consoleauth.token_ttl = 600 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.645823] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cyborg.cafile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.645980] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cyborg.certfile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.646161] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cyborg.collect_timing = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.646321] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cyborg.connect_retries = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.646483] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cyborg.connect_retry_delay = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.646636] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cyborg.endpoint_override = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.646796] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] 
cyborg.insecure = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.646950] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cyborg.keyfile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.647122] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cyborg.max_version = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.647279] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cyborg.min_version = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.647435] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cyborg.region_name = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.647592] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cyborg.retriable_status_codes = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.647747] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cyborg.service_name = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.647912] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cyborg.service_type = accelerator {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.648083] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cyborg.split_loggers = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.648246] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cyborg.status_code_retries = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.648401] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cyborg.status_code_retry_delay = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.648559] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cyborg.timeout = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.648738] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.648902] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] cyborg.version = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.649102] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] database.backend = sqlalchemy {{(pid=61852) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.649283] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] database.connection = **** {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.649744] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] database.connection_debug = 0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.649744] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] database.connection_parameters = {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.649825] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] database.connection_recycle_time = 3600 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.650028] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] database.connection_trace = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.650193] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] database.db_inc_retry_interval = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.650363] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] database.db_max_retries = 20 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.650526] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] database.db_max_retry_interval = 10 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.650687] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] database.db_retry_interval = 1 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.650848] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] database.max_overflow = 50 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.651047] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] database.max_pool_size = 5 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.651228] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] database.max_retries = 10 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.651401] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.651559] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] database.mysql_wsrep_sync_wait = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} 
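
Every record in this dump is emitted by oslo.config's ConfigOpts.log_opt_values(), which oslo.service invokes at service launch (hence the oslo_service.service logger name): one DEBUG line per registered option, and options registered with secret=True (database.connection, cache.memcache_password, vault.root_token_id, ...) masked as ****. A minimal standalone sketch of that mechanism follows, with toy option definitions chosen to mirror a few of the database.* values above — this is illustrative, not Nova's actual option registration code:

    import logging

    from oslo_config import cfg

    CONF = cfg.ConfigOpts()
    CONF.register_opts(
        [
            # secret=True is why the dump shows "database.connection = ****"
            cfg.StrOpt('connection', secret=True),
            cfg.IntOpt('max_pool_size', default=5),
            cfg.IntOpt('max_overflow', default=50),
            cfg.StrOpt('mysql_sql_mode', default='TRADITIONAL'),
        ],
        group='database',
    )

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger(__name__)

    CONF(args=[], project='demo')            # parse CLI args / config files
    CONF.log_opt_values(LOG, logging.DEBUG)  # one "group.option = value" line each

In nova.conf the same values sit in INI sections — a "[database]" header followed by lines like "max_pool_size = 5" — and the "group.option" prefix in each log record is simply the section name joined to the option name.
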
[ 473.651717] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] database.pool_timeout = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.651877] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] database.retry_interval = 10 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.652083] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] database.slave_connection = **** {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.652258] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] database.sqlite_synchronous = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.652423] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] database.use_db_reconnect = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.652600] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api_database.backend = sqlalchemy {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.652771] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api_database.connection = **** {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.652934] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api_database.connection_debug = 0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.653128] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api_database.connection_parameters = {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.653298] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api_database.connection_recycle_time = 3600 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.653462] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api_database.connection_trace = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.653625] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api_database.db_inc_retry_interval = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.653788] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api_database.db_max_retries = 20 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.653949] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api_database.db_max_retry_interval = 10 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.654127] env[61852]: DEBUG 
oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api_database.db_retry_interval = 1 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.654290] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api_database.max_overflow = 50 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.654451] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api_database.max_pool_size = 5 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.654612] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api_database.max_retries = 10 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.654780] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.654942] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.655114] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api_database.pool_timeout = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.655278] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api_database.retry_interval = 10 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.655436] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api_database.slave_connection = **** {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.655596] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] api_database.sqlite_synchronous = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.655770] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] devices.enabled_mdev_types = [] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.655947] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.656128] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ephemeral_storage_encryption.default_format = luks {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.656292] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ephemeral_storage_encryption.enabled = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.656453] env[61852]: 
DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.656625] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] glance.api_servers = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.656787] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] glance.cafile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.656947] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] glance.certfile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.657169] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] glance.collect_timing = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.657297] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] glance.connect_retries = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.657454] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] glance.connect_retry_delay = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.657614] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] glance.debug = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.657778] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] glance.default_trusted_certificate_ids = [] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.657939] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] glance.enable_certificate_validation = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.658119] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] glance.enable_rbd_download = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.658279] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] glance.endpoint_override = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.658443] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] glance.insecure = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.658602] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] glance.keyfile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.658759] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] 
glance.max_version = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.658920] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] glance.min_version = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.659086] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] glance.num_retries = 3 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.659257] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] glance.rbd_ceph_conf = {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.659416] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] glance.rbd_connect_timeout = 5 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.659582] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] glance.rbd_pool = {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.659746] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] glance.rbd_user = {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.659979] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] glance.region_name = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.660172] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] glance.retriable_status_codes = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.660337] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] glance.service_name = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.660506] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] glance.service_type = image {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.660668] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] glance.split_loggers = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.660834] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] glance.status_code_retries = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.661039] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] glance.status_code_retry_delay = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.661223] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] glance.timeout = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.661406] 
env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.661570] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] glance.verify_glance_signatures = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.661730] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] glance.version = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.661898] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] guestfs.debug = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.662121] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] mks.enabled = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.662484] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.662676] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] image_cache.manager_interval = 2400 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.662845] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] image_cache.precache_concurrency = 1 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.663025] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] image_cache.remove_unused_base_images = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.663200] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.663368] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.663544] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] image_cache.subdirectory_name = _base {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.663718] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ironic.api_max_retries = 60 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.663881] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ironic.api_retry_interval = 2 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
473.664049] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ironic.auth_section = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.664219] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ironic.auth_type = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.664378] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ironic.cafile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.664537] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ironic.certfile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.664698] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ironic.collect_timing = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.664858] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ironic.conductor_group = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.665024] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ironic.connect_retries = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.665189] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ironic.connect_retry_delay = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.665348] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ironic.endpoint_override = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.665505] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ironic.insecure = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.665664] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ironic.keyfile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.665823] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ironic.max_version = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.665977] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ironic.min_version = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.666196] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ironic.peer_list = [] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.666366] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ironic.region_name = None {{(pid=61852) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.666524] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ironic.retriable_status_codes = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.666691] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ironic.serial_console_state_timeout = 10 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.666847] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ironic.service_name = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.667022] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ironic.service_type = baremetal {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.667213] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ironic.shard = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.667381] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ironic.split_loggers = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.667538] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ironic.status_code_retries = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.667694] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ironic.status_code_retry_delay = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.667851] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ironic.timeout = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.668038] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.668205] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ironic.version = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.668389] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.668563] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] key_manager.fixed_key = **** {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.668744] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=61852) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.668909] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] barbican.barbican_api_version = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.669078] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] barbican.barbican_endpoint = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.669255] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] barbican.barbican_endpoint_type = public {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.669414] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] barbican.barbican_region_name = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.669572] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] barbican.cafile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.669730] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] barbican.certfile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.669891] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] barbican.collect_timing = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.670153] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] barbican.insecure = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.670332] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] barbican.keyfile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.670499] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] barbican.number_of_retries = 60 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.670663] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] barbican.retry_delay = 1 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.670826] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] barbican.send_service_user_token = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.671030] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] barbican.split_loggers = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.671194] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] barbican.timeout = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.671362] 
env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] barbican.verify_ssl = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.671520] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] barbican.verify_ssl_path = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.671687] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] barbican_service_user.auth_section = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.671849] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] barbican_service_user.auth_type = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.672057] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] barbican_service_user.cafile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.672237] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] barbican_service_user.certfile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.672402] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] barbican_service_user.collect_timing = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.672563] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] barbican_service_user.insecure = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.672722] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] barbican_service_user.keyfile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.672883] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] barbican_service_user.split_loggers = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.673053] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] barbican_service_user.timeout = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.673234] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vault.approle_role_id = **** {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.673383] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vault.approle_secret_id = **** {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.673549] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vault.kv_mountpoint = secret {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.673708] env[61852]: DEBUG 
oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vault.kv_path = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.673870] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vault.kv_version = 2 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.674037] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vault.namespace = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.674201] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vault.root_token_id = **** {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.674358] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vault.ssl_ca_crt_file = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.674524] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vault.timeout = 60.0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.674686] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vault.use_ssl = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.674851] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.675045] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] keystone.auth_section = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.675219] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] keystone.auth_type = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.675381] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] keystone.cafile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.675536] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] keystone.certfile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.675696] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] keystone.collect_timing = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.675854] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] keystone.connect_retries = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.676023] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] keystone.connect_retry_delay = None {{(pid=61852) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.676199] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] keystone.endpoint_override = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.676369] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] keystone.insecure = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.676526] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] keystone.keyfile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.676682] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] keystone.max_version = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.676838] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] keystone.min_version = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.676990] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] keystone.region_name = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.677159] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] keystone.retriable_status_codes = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.677313] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] keystone.service_name = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.677479] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] keystone.service_type = identity {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.677635] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] keystone.split_loggers = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.677789] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] keystone.status_code_retries = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.677947] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] keystone.status_code_retry_delay = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.678114] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] keystone.timeout = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.678293] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
473.678451] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] keystone.version = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.678646] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.connection_uri = {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.678805] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.cpu_mode = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.678971] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.cpu_model_extra_flags = [] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.679153] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.cpu_models = [] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.679322] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.cpu_power_governor_high = performance {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.679486] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.cpu_power_governor_low = powersave {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.679645] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.cpu_power_management = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.679813] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.679975] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.device_detach_attempts = 8 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.680229] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.device_detach_timeout = 20 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.680406] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.disk_cachemodes = [] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.680567] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.disk_prefix = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.680732] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.enabled_perf_events = [] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.680894] env[61852]: DEBUG oslo_service.service [None 
req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.file_backed_memory = 0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.681105] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.gid_maps = [] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.681277] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.hw_disk_discard = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.681439] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.hw_machine_type = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.681612] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.images_rbd_ceph_conf = {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.681780] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.681964] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.682208] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.images_rbd_glance_store_name = {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.682388] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.images_rbd_pool = rbd {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.682560] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.images_type = default {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.682720] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.images_volume_group = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.682882] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.inject_key = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.683053] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.inject_partition = -2 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.683220] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.inject_password = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.683385] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] 
libvirt.iscsi_iface = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.683544] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.iser_use_multipath = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.683706] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.live_migration_bandwidth = 0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.683867] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.684043] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.live_migration_downtime = 500 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.684216] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.684379] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.684539] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.live_migration_inbound_addr = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.684699] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.684863] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.live_migration_permit_post_copy = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.685046] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.live_migration_scheme = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.685233] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.live_migration_timeout_action = abort {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.685400] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.live_migration_tunnelled = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.685560] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.live_migration_uri = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.685721] env[61852]: DEBUG oslo_service.service [None 
req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.live_migration_with_native_tls = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.685876] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.max_queues = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.686047] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.686302] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.686468] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.nfs_mount_options = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.686768] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.686942] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.687124] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.num_iser_scan_tries = 5 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.687287] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.num_memory_encrypted_guests = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.687451] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.687614] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.num_pcie_ports = 0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.687778] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.num_volume_scan_tries = 5 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.687945] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.pmem_namespaces = [] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.688118] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.quobyte_client_cfg = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.688407] env[61852]: DEBUG oslo_service.service [None 
req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.688580] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.rbd_connect_timeout = 5 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.688744] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.688909] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.689091] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.rbd_secret_uuid = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.689258] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.rbd_user = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.689421] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.689594] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.remote_filesystem_transport = ssh {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.689753] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.rescue_image_id = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.689911] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.rescue_kernel_id = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.690078] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.rescue_ramdisk_id = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.690334] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.690504] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.rx_queue_size = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.690676] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.smbfs_mount_options = {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.690974] env[61852]: DEBUG oslo_service.service [None 
req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.691171] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.snapshot_compression = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.691337] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.snapshot_image_format = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.691560] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.691728] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.sparse_logical_volumes = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.691889] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.swtpm_enabled = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.692110] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.swtpm_group = tss {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.692295] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.swtpm_user = tss {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.692469] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.sysinfo_serial = unique {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.692628] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.tb_cache_size = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.692788] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.tx_queue_size = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.692953] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.uid_maps = [] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.693132] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.use_virtio_for_bridges = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.693306] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.virt_type = kvm {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.693474] env[61852]: DEBUG oslo_service.service [None 
req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.volume_clear = zero {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.693638] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.volume_clear_size = 0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.693804] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.volume_use_multipath = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.693963] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.vzstorage_cache_path = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.694161] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.694332] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.vzstorage_mount_group = qemu {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.694499] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.vzstorage_mount_opts = [] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.694667] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.694964] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.695157] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.vzstorage_mount_user = stack {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.695331] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.695502] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] neutron.auth_section = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.695677] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] neutron.auth_type = password {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.695840] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] neutron.cafile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.695999] env[61852]: DEBUG oslo_service.service 
[None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] neutron.certfile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.696177] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] neutron.collect_timing = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.696335] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] neutron.connect_retries = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.696492] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] neutron.connect_retry_delay = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.696661] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] neutron.default_floating_pool = public {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.696817] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] neutron.endpoint_override = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.697014] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] neutron.extension_sync_interval = 600 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.697172] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] neutron.http_retries = 3 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.697305] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] neutron.insecure = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.697465] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] neutron.keyfile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.697655] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] neutron.max_version = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.697784] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.697943] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] neutron.min_version = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.698124] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] neutron.ovs_bridge = br-int {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.698315] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] neutron.physnets = [] {{(pid=61852) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.698493] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] neutron.region_name = RegionOne {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.698654] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] neutron.retriable_status_codes = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.698831] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] neutron.service_metadata_proxy = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.698980] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] neutron.service_name = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.699176] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] neutron.service_type = network {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.699340] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] neutron.split_loggers = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.699499] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] neutron.status_code_retries = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.699655] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] neutron.status_code_retry_delay = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.699811] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] neutron.timeout = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.699988] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.700165] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] neutron.version = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.700417] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] notifications.bdms_in_notifications = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.700602] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] notifications.default_level = INFO {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.700779] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] notifications.notification_format = unversioned {{(pid=61852) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.700957] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] notifications.notify_on_state_change = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.701441] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.701441] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] pci.alias = [] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.701510] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] pci.device_spec = [] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.701665] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] pci.report_in_placement = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.701838] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.auth_section = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.702055] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.auth_type = password {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.702247] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.702412] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.cafile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.702570] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.certfile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.702733] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.collect_timing = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.702894] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.connect_retries = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.703066] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.connect_retry_delay = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.703233] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.default_domain_id = None {{(pid=61852) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.703393] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.default_domain_name = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.703551] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.domain_id = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.703708] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.domain_name = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.703866] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.endpoint_override = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.704046] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.insecure = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.704218] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.keyfile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.704373] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.max_version = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.704528] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.min_version = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.704694] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.password = **** {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.704852] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.project_domain_id = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.705029] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.project_domain_name = Default {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.705206] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.project_id = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.705380] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.project_name = service {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.705548] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.region_name = RegionOne {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.705711] 
env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.retriable_status_codes = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.705869] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.service_name = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.706048] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.service_type = placement {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.706217] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.split_loggers = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.706374] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.status_code_retries = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.706532] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.status_code_retry_delay = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.706691] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.system_scope = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.706845] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.timeout = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.707010] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.trust_id = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.707174] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.user_domain_id = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.707338] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.user_domain_name = Default {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.707497] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.user_id = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.707667] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.username = nova {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.707848] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.708017] env[61852]: DEBUG oslo_service.service [None 
req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] placement.version = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.708218] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] quota.cores = 20 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.708410] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] quota.count_usage_from_placement = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.708587] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.708761] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] quota.injected_file_content_bytes = 10240 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.708932] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] quota.injected_file_path_length = 255 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.709111] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] quota.injected_files = 5 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.709284] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] quota.instances = 10 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.709449] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] quota.key_pairs = 100 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.709616] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] quota.metadata_items = 128 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.709782] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] quota.ram = 51200 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.709943] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] quota.recheck_quota = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.710122] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] quota.server_group_members = 10 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.710289] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] quota.server_groups = 10 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.710552] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=61852) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.710725] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.710889] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] scheduler.image_metadata_prefilter = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.711090] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.711264] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] scheduler.max_attempts = 3 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.711429] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] scheduler.max_placement_results = 1000 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.711590] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.711752] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] scheduler.query_placement_for_image_type_support = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.711913] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.712144] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] scheduler.workers = 2 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.712332] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.712507] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.712698] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.712869] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.713044] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.713215] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.713381] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.713569] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.713734] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] filter_scheduler.host_subset_size = 1 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.713895] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.714100] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.714309] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.714483] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] filter_scheduler.isolated_hosts = [] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.714650] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] filter_scheduler.isolated_images = [] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.714811] env[61852]: DEBUG oslo_service.service [None 
req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.714970] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.715147] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.715307] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] filter_scheduler.pci_in_placement = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.715467] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.715625] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.715785] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.715947] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.716137] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.716309] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.716466] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] filter_scheduler.track_instance_changes = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.716642] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.716810] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] metrics.required = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.716969] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] metrics.weight_multiplier = 1.0 
{{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.717147] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.717309] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] metrics.weight_setting = [] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.717625] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.717797] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] serial_console.enabled = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.717971] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] serial_console.port_range = 10000:20000 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.718156] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.718326] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.718492] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] serial_console.serialproxy_port = 6083 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.718656] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] service_user.auth_section = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.718829] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] service_user.auth_type = password {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.718989] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] service_user.cafile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.719160] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] service_user.certfile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.719324] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] service_user.collect_timing = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.719484] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] service_user.insecure = False {{(pid=61852) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.719641] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] service_user.keyfile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.719810] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] service_user.send_service_user_token = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.719973] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] service_user.split_loggers = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.720174] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] service_user.timeout = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.720346] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] spice.agent_enabled = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.720586] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] spice.enabled = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.720903] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.721134] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.721311] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] spice.html5proxy_port = 6082 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.721471] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] spice.image_compression = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.721630] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] spice.jpeg_compression = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.721786] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] spice.playback_compression = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.721979] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] spice.require_secure = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.722192] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] spice.server_listen = 127.0.0.1 {{(pid=61852) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.722368] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.722527] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] spice.streaming_mode = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.722683] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] spice.zlib_compression = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.722849] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] upgrade_levels.baseapi = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.723026] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] upgrade_levels.compute = auto {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.723193] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] upgrade_levels.conductor = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.723349] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] upgrade_levels.scheduler = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.723513] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vendordata_dynamic_auth.auth_section = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.723672] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vendordata_dynamic_auth.auth_type = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.723828] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vendordata_dynamic_auth.cafile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.723986] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vendordata_dynamic_auth.certfile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.724176] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.724337] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vendordata_dynamic_auth.insecure = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.724495] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vendordata_dynamic_auth.keyfile = None {{(pid=61852) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.724654] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.724811] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vendordata_dynamic_auth.timeout = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.724983] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vmware.api_retry_count = 10 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.725161] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vmware.ca_file = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.725333] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vmware.cache_prefix = devstack-image-cache {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.725499] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vmware.cluster_name = testcl1 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.725661] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vmware.connection_pool_size = 10 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.725816] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vmware.console_delay_seconds = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.725980] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vmware.datastore_regex = ^datastore.* {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.726199] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.726370] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vmware.host_password = **** {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.726535] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vmware.host_port = 443 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.726698] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vmware.host_username = administrator@vsphere.local {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.726865] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vmware.insecure = True {{(pid=61852) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.727034] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vmware.integration_bridge = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.727206] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vmware.maximum_objects = 100 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.727363] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vmware.pbm_default_policy = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.727568] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vmware.pbm_enabled = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.727686] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vmware.pbm_wsdl_location = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.727851] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.728015] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vmware.serial_port_proxy_uri = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.728197] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vmware.serial_port_service_uri = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.728363] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vmware.task_poll_interval = 0.5 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.728531] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vmware.use_linked_clone = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.728698] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vmware.vnc_keymap = en-us {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.728861] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vmware.vnc_port = 5900 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.729033] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vmware.vnc_port_total = 10000 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.729223] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vnc.auth_schemes = ['none'] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.729395] 
env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vnc.enabled = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.729680] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.729863] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.730046] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vnc.novncproxy_port = 6080 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.730222] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vnc.server_listen = 127.0.0.1 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.730391] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.730615] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vnc.vencrypt_ca_certs = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.730793] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vnc.vencrypt_client_cert = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.730973] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vnc.vencrypt_client_key = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.731181] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.731349] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] workarounds.disable_deep_image_inspection = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.731511] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.731673] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.731831] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=61852) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.732034] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] workarounds.disable_rootwrap = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.732221] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] workarounds.enable_numa_live_migration = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.732385] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.732549] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.732709] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.732868] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] workarounds.libvirt_disable_apic = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.733041] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.733253] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.733546] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.733639] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.733807] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.733968] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.734192] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.734370] 
env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.734533] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.734701] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.734887] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.735069] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] wsgi.client_socket_timeout = 900 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.735242] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] wsgi.default_pool_size = 1000 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.735410] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] wsgi.keep_alive = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.735577] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] wsgi.max_header_line = 16384 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.735736] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] wsgi.secure_proxy_ssl_header = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.735893] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] wsgi.ssl_ca_file = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.736061] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] wsgi.ssl_cert_file = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.736225] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] wsgi.ssl_key_file = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.736388] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] wsgi.tcp_keepidle = 600 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.736564] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=61852) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.736728] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] zvm.ca_file = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.736886] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] zvm.cloud_connector_url = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.737215] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.737396] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] zvm.reachable_timeout = 300 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.737671] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_policy.enforce_new_defaults = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.737967] env[61852]: WARNING oslo_config.cfg [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
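Every record in this dump points at log_opt_values (oslo_config/cfg.py:2826): at startup, nova-compute hands its fully resolved configuration to oslo.config, which writes one DEBUG line per option and masks anything registered with secret=True, which is why vmware.host_password and the notifications transport_url appear as ****. The dotted prefixes map to nova.conf sections, so scheduler.max_attempts = 3 corresponds to max_attempts = 3 under [scheduler] on disk. Below is a minimal, self-contained sketch of the same mechanism using stock oslo.config; the vmware option names are borrowed from the dump, everything else is illustrative:

import logging

from oslo_config import cfg

LOG = logging.getLogger(__name__)
CONF = cfg.CONF

opts = [
    cfg.StrOpt('host_ip', help='Hostname or IP of the vCenter server.'),
    # secret=True is what makes log_opt_values() print "****" instead of
    # the real value, as seen for vmware.host_password in the dump above.
    cfg.StrOpt('host_password', secret=True, help='vCenter password.'),
    cfg.IntOpt('api_retry_count', default=10,
               help='Retries for vCenter API calls.'),
]
CONF.register_opts(opts, group='vmware')

if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    # Parse CLI args/config files the way a service would; with no args this
    # just applies defaults (plus /etc/nova/nova.conf if present).
    CONF([], project='nova')
    # This is the call the records above attribute to cfg.py:2826: one
    # DEBUG line per resolved option, grouped by section.
    CONF.log_opt_values(LOG, logging.DEBUG)

Options never set in nova.conf are logged with their defaults (for example the oslo_limit.* block, which is None throughout), so the dump reflects the effective configuration, not just what the operator wrote.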
[ 473.738169] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_policy.enforce_scope = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.738346] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_policy.policy_default_rule = default {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.738531] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.738707] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_policy.policy_file = policy.yaml {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.738885] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.739060] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.739228] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.739388] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.739550] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.739718] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.739893] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.740078] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] profiler.connection_string = messaging:// {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.740253] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] profiler.enabled = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.740422] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] profiler.es_doc_type = notification 
{{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.740644] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] profiler.es_scroll_size = 10000 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.740841] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] profiler.es_scroll_time = 2m {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.741049] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] profiler.filter_error_trace = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.741241] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] profiler.hmac_keys = **** {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.741413] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] profiler.sentinel_service_name = mymaster {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.741581] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] profiler.socket_timeout = 0.1 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.741744] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] profiler.trace_requests = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.741908] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] profiler.trace_sqlalchemy = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.742127] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] profiler_jaeger.process_tags = {} {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.742297] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] profiler_jaeger.service_name_prefix = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.742463] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] profiler_otlp.service_name_prefix = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.742630] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] remote_debug.host = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.742792] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] remote_debug.port = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.742971] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=61852) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.743150] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.743342] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.743521] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.743687] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.743848] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.744014] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.744179] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.744340] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.744508] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.744665] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.744835] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.745007] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.745184] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.745355] 
env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.745521] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.745683] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.745855] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.746036] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.746210] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.746377] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.746543] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.746702] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.746867] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.747036] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.747205] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.747363] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.747522] env[61852]: DEBUG oslo_service.service [None 
req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.747770] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.747853] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.ssl = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.748018] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.748201] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.748345] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.748516] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.748681] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.ssl_version = {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.748842] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.749039] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.749212] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_notifications.retry = -1 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.749396] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.749571] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_messaging_notifications.transport_url = **** {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.749741] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_limit.auth_section = None {{(pid=61852) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.749902] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_limit.auth_type = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.750070] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_limit.cafile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.750234] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_limit.certfile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.750394] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_limit.collect_timing = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.750549] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_limit.connect_retries = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.750772] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_limit.connect_retry_delay = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.750964] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_limit.endpoint_id = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.751167] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_limit.endpoint_override = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.751336] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_limit.insecure = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.751492] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_limit.keyfile = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.751648] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_limit.max_version = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.751802] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_limit.min_version = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.751975] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_limit.region_name = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.752163] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_limit.retriable_status_codes = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.752324] 
env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_limit.service_name = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.752480] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_limit.service_type = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.752640] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_limit.split_loggers = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.752796] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_limit.status_code_retries = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.752953] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_limit.status_code_retry_delay = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.753121] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_limit.timeout = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.753303] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_limit.valid_interfaces = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.753474] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_limit.version = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.753639] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_reports.file_event_handler = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.753804] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.753963] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] oslo_reports.log_dir = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.754142] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.754303] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.754459] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.754624] 
env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.754785] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.754941] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.755136] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.755300] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vif_plug_ovs_privileged.group = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.755456] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.755618] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.755776] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.755931] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] vif_plug_ovs_privileged.user = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.756115] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] os_vif_linux_bridge.flat_interface = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.756295] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.756465] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.756632] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.756798] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] 
os_vif_linux_bridge.iptables_top_regex = {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.756962] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.757143] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.757305] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.757481] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.757650] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] os_vif_ovs.isolate_vif = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.757872] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.758090] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.758298] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.758449] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] os_vif_ovs.ovsdb_interface = native {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.758612] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] os_vif_ovs.per_port_bridge = False {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.758783] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] privsep_osbrick.capabilities = [21] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.758945] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] privsep_osbrick.group = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.759117] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] privsep_osbrick.helper_command = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.759282] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None 
None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.759442] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.759597] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] privsep_osbrick.user = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.759767] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.759922] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] nova_sys_admin.group = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.760098] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] nova_sys_admin.helper_command = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.760271] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.760432] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.760587] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] nova_sys_admin.user = None {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 473.760738] env[61852]: DEBUG oslo_service.service [None req-ee628ac2-fe84-46a3-a4bb-fdf108521343 None None] ******************************************************************************** {{(pid=61852) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2830}} [ 473.761290] env[61852]: INFO nova.service [-] Starting compute node (version 30.1.0) [ 474.264405] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-63bd7d30-6ebf-4a65-84bd-72c45b11677e None None] Getting list of instances from cluster (obj){ [ 474.264405] env[61852]: value = "domain-c8" [ 474.264405] env[61852]: _type = "ClusterComputeResource" [ 474.264405] env[61852]: } {{(pid=61852) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 474.265591] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f2a349e-e0e4-4e7a-beb7-2c9b6987b4e4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 474.274673] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-63bd7d30-6ebf-4a65-84bd-72c45b11677e None None] Got total of 0 instances {{(pid=61852) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 474.275210] env[61852]: WARNING nova.virt.vmwareapi.driver [None req-63bd7d30-6ebf-4a65-84bd-72c45b11677e None None] 
The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 474.275668] env[61852]: INFO nova.virt.node [None req-63bd7d30-6ebf-4a65-84bd-72c45b11677e None None] Generated node identity f818062c-7b17-4bd0-94af-192a674543c3 [ 474.275903] env[61852]: INFO nova.virt.node [None req-63bd7d30-6ebf-4a65-84bd-72c45b11677e None None] Wrote node identity f818062c-7b17-4bd0-94af-192a674543c3 to /opt/stack/data/n-cpu-1/compute_id [ 474.778647] env[61852]: WARNING nova.compute.manager [None req-63bd7d30-6ebf-4a65-84bd-72c45b11677e None None] Compute nodes ['f818062c-7b17-4bd0-94af-192a674543c3'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 475.790322] env[61852]: INFO nova.compute.manager [None req-63bd7d30-6ebf-4a65-84bd-72c45b11677e None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 476.799033] env[61852]: WARNING nova.compute.manager [None req-63bd7d30-6ebf-4a65-84bd-72c45b11677e None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 476.799033] env[61852]: DEBUG oslo_concurrency.lockutils [None req-63bd7d30-6ebf-4a65-84bd-72c45b11677e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 476.799033] env[61852]: DEBUG oslo_concurrency.lockutils [None req-63bd7d30-6ebf-4a65-84bd-72c45b11677e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 476.799033] env[61852]: DEBUG oslo_concurrency.lockutils [None req-63bd7d30-6ebf-4a65-84bd-72c45b11677e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 476.799033] env[61852]: DEBUG nova.compute.resource_tracker [None req-63bd7d30-6ebf-4a65-84bd-72c45b11677e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61852) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 476.799033] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9697d372-47b5-46f7-b3e5-183356d0082c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 476.806543] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8d8cb5f-b0e6-423a-976a-e7ced5429b45 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 476.820135] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-cace11d0-030a-42ff-a4c3-8439a1ad07d7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 476.826630] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e23fca0-4c49-42db-9048-1fd4272a8d6b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 476.855936] env[61852]: DEBUG nova.compute.resource_tracker [None req-63bd7d30-6ebf-4a65-84bd-72c45b11677e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181548MB free_disk=139GB free_vcpus=48 pci_devices=None {{(pid=61852) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 476.856309] env[61852]: DEBUG oslo_concurrency.lockutils [None req-63bd7d30-6ebf-4a65-84bd-72c45b11677e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 476.856607] env[61852]: DEBUG oslo_concurrency.lockutils [None req-63bd7d30-6ebf-4a65-84bd-72c45b11677e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 477.364556] env[61852]: WARNING nova.compute.resource_tracker [None req-63bd7d30-6ebf-4a65-84bd-72c45b11677e None None] No compute node record for cpu-1:f818062c-7b17-4bd0-94af-192a674543c3: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host f818062c-7b17-4bd0-94af-192a674543c3 could not be found. [ 477.868211] env[61852]: INFO nova.compute.resource_tracker [None req-63bd7d30-6ebf-4a65-84bd-72c45b11677e None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: f818062c-7b17-4bd0-94af-192a674543c3 [ 479.378839] env[61852]: DEBUG nova.compute.resource_tracker [None req-63bd7d30-6ebf-4a65-84bd-72c45b11677e None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61852) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 479.378839] env[61852]: DEBUG nova.compute.resource_tracker [None req-63bd7d30-6ebf-4a65-84bd-72c45b11677e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61852) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 479.529589] env[61852]: INFO nova.scheduler.client.report [None req-63bd7d30-6ebf-4a65-84bd-72c45b11677e None None] [req-2ebc5fe0-a2fa-4f15-9791-0e95a4398fed] Created resource provider record via placement API for resource provider with UUID f818062c-7b17-4bd0-94af-192a674543c3 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
[ 479.546146] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d329bf-d571-42a5-b6dd-83ef68d315a8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 479.552729] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70be45a7-cb68-4df4-ad30-801e71dcd290 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 479.582195] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b330ed6-9ed4-43a3-8e72-b214483d4deb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 479.589413] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7b59bc8-379e-4be0-bff3-3748cf413dbd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 479.602277] env[61852]: DEBUG nova.compute.provider_tree [None req-63bd7d30-6ebf-4a65-84bd-72c45b11677e None None] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 480.138807] env[61852]: DEBUG nova.scheduler.client.report [None req-63bd7d30-6ebf-4a65-84bd-72c45b11677e None None] Updated inventory for provider f818062c-7b17-4bd0-94af-192a674543c3 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 480.139125] env[61852]: DEBUG nova.compute.provider_tree [None req-63bd7d30-6ebf-4a65-84bd-72c45b11677e None None] Updating resource provider f818062c-7b17-4bd0-94af-192a674543c3 generation from 0 to 1 during operation: update_inventory {{(pid=61852) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 480.139284] env[61852]: DEBUG nova.compute.provider_tree [None req-63bd7d30-6ebf-4a65-84bd-72c45b11677e None None] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 480.186865] env[61852]: DEBUG nova.compute.provider_tree [None req-63bd7d30-6ebf-4a65-84bd-72c45b11677e None None] Updating 
resource provider f818062c-7b17-4bd0-94af-192a674543c3 generation from 1 to 2 during operation: update_traits {{(pid=61852) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 480.692101] env[61852]: DEBUG nova.compute.resource_tracker [None req-63bd7d30-6ebf-4a65-84bd-72c45b11677e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61852) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 480.692491] env[61852]: DEBUG oslo_concurrency.lockutils [None req-63bd7d30-6ebf-4a65-84bd-72c45b11677e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.836s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 480.692634] env[61852]: DEBUG nova.service [None req-63bd7d30-6ebf-4a65-84bd-72c45b11677e None None] Creating RPC server for service compute {{(pid=61852) start /opt/stack/nova/nova/service.py:186}} [ 480.706027] env[61852]: DEBUG nova.service [None req-63bd7d30-6ebf-4a65-84bd-72c45b11677e None None] Join ServiceGroup membership for this service compute {{(pid=61852) start /opt/stack/nova/nova/service.py:203}} [ 480.706220] env[61852]: DEBUG nova.servicegroup.drivers.db [None req-63bd7d30-6ebf-4a65-84bd-72c45b11677e None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=61852) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 516.176370] env[61852]: DEBUG oslo_concurrency.lockutils [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Acquiring lock "b9f22589-1803-4688-bad1-8be6965d6c92" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 516.176370] env[61852]: DEBUG oslo_concurrency.lockutils [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Lock "b9f22589-1803-4688-bad1-8be6965d6c92" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 516.678085] env[61852]: DEBUG nova.compute.manager [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 516.976589] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Acquiring lock "b31d126f-5b63-434c-a2c3-c7dc2f40c80f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 516.976589] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Lock "b31d126f-5b63-434c-a2c3-c7dc2f40c80f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 517.223739] env[61852]: DEBUG oslo_concurrency.lockutils [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 517.225016] env[61852]: DEBUG oslo_concurrency.lockutils [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 517.226646] env[61852]: INFO nova.compute.claims [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 517.230800] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Acquiring lock "bb04c866-2e19-48e9-9aa5-89af0e56d735" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 517.231064] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Lock "bb04c866-2e19-48e9-9aa5-89af0e56d735" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 517.477927] env[61852]: DEBUG nova.compute.manager [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 517.745112] env[61852]: DEBUG nova.compute.manager [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 518.012085] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 518.271210] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 518.346238] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f3046da-627d-4609-8c12-41c995892a87 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.363200] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a4584c2-0d59-4f31-9ab6-903b709c1adb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.394196] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-852559e5-1d71-419d-894d-b4374394e7d3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.401711] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f868ea7c-f5bc-4f2c-8309-65a870449545 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.416845] env[61852]: DEBUG nova.compute.provider_tree [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 518.674234] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Acquiring lock "e377c443-91b5-4d99-a0e8-a9731421a39e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 518.674555] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Lock "e377c443-91b5-4d99-a0e8-a9731421a39e" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 518.680487] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Acquiring lock "62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 518.680487] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Lock "62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 518.919712] env[61852]: DEBUG nova.scheduler.client.report [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 519.116764] env[61852]: DEBUG oslo_concurrency.lockutils [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Acquiring lock "8084d5e2-454b-4003-a9e8-b733fd0322a3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 519.117234] env[61852]: DEBUG oslo_concurrency.lockutils [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Lock "8084d5e2-454b-4003-a9e8-b733fd0322a3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 519.181376] env[61852]: DEBUG nova.compute.manager [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 519.186214] env[61852]: DEBUG nova.compute.manager [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 519.426540] env[61852]: DEBUG oslo_concurrency.lockutils [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.201s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 519.426761] env[61852]: DEBUG nova.compute.manager [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 519.429702] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.418s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 519.431126] env[61852]: INFO nova.compute.claims [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 519.620212] env[61852]: DEBUG nova.compute.manager [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 519.717121] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 519.728463] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 519.939457] env[61852]: DEBUG nova.compute.utils [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 519.944311] env[61852]: DEBUG nova.compute.manager [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 519.946071] env[61852]: DEBUG nova.network.neutron [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 520.151773] env[61852]: DEBUG oslo_concurrency.lockutils [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 520.225839] env[61852]: DEBUG nova.policy [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f65a5e8f3ba3423e962c68dd354c032f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9597248cced74ef4a07fc64c195fa5fb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 520.446130] env[61852]: DEBUG nova.compute.manager [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 520.606746] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4455000-c538-4073-b30e-6550975e8678 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.618653] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a11e078-4e28-45f6-8e78-76770ec75e35 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.656878] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa61e25-fde4-430d-b80d-f31c0ac41583 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.667561] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70575fe2-89db-4210-bbb8-3972156332cd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.685672] env[61852]: DEBUG nova.compute.provider_tree [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 521.069531] env[61852]: DEBUG nova.network.neutron [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Successfully created port: 2d6684e9-2eca-48a5-9ee0-afd55971a3f9 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 521.190134] env[61852]: DEBUG nova.scheduler.client.report [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 521.459582] env[61852]: DEBUG nova.compute.manager [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 521.488740] env[61852]: DEBUG nova.virt.hardware [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=<?>,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-15T17:18:24Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 521.489318] env[61852]: DEBUG nova.virt.hardware [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 521.489318] env[61852]: DEBUG nova.virt.hardware [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 521.489433] env[61852]: DEBUG nova.virt.hardware [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 521.489584] env[61852]: DEBUG nova.virt.hardware [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 521.489772] env[61852]: DEBUG nova.virt.hardware [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 521.490070] env[61852]: DEBUG nova.virt.hardware [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 521.490506] env[61852]: DEBUG nova.virt.hardware [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 521.490717] env[61852]: DEBUG nova.virt.hardware [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 521.490914] env[61852]: DEBUG nova.virt.hardware [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 521.491110] env[61852]: DEBUG nova.virt.hardware [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 521.492113] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a43e19d-7d60-42e1-a02f-aec0e30dd599 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.503547] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c232f51-2f84-4cf6-92d0-fcd92b5c8614 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.532592] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-687e21b0-87f4-4db7-9e9e-4ba714b12e52 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.704234] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.274s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 521.704840] env[61852]: DEBUG nova.compute.manager [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 521.710464] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.437s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 521.711258] env[61852]: INFO nova.compute.claims [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 522.210741] env[61852]: DEBUG nova.compute.utils [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 522.215222] env[61852]: DEBUG nova.compute.manager [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 522.215509] env[61852]: DEBUG nova.network.neutron [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 522.315673] env[61852]: DEBUG nova.policy [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6271ea2f00a546cd8ed4d556f530ab5b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dfa77202117a445381073c35c04e557e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 522.723022] env[61852]: DEBUG nova.compute.manager [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 522.855438] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40042963-c438-47b3-87cb-ccd3a9b614e9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.863335] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-356c4c6b-dc1a-4706-ab95-5b3383dd9109 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.915593] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f1b6ee9-c7d7-406c-b102-5c7d5d48f8a7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.929204] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82475f83-8a1b-4f97-867f-119f8517645e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.956863] env[61852]: DEBUG nova.compute.provider_tree [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 523.460585] env[61852]: DEBUG nova.scheduler.client.report [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 523.472180] env[61852]: DEBUG nova.network.neutron [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Successfully created port: 255a96e4-ee07-4e40-97c0-76906b0e4e6d {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 523.736323] env[61852]: DEBUG nova.compute.manager [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 523.775611] env[61852]: DEBUG nova.virt.hardware [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=<?>,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-15T17:18:24Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 523.775852] env[61852]: DEBUG nova.virt.hardware [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 523.778226] env[61852]: DEBUG nova.virt.hardware [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 523.778226] env[61852]: DEBUG nova.virt.hardware [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 523.778226] env[61852]: DEBUG nova.virt.hardware [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 523.778226] env[61852]: DEBUG nova.virt.hardware [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 523.778226] env[61852]: DEBUG nova.virt.hardware [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 523.778437] env[61852]: DEBUG nova.virt.hardware [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 523.778437] env[61852]: DEBUG nova.virt.hardware [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 
tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 523.778437] env[61852]: DEBUG nova.virt.hardware [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 523.778437] env[61852]: DEBUG nova.virt.hardware [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 523.778437] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90f5cc60-148e-4d72-a13c-c6faa0ada0b5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.790812] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb3a0372-a14c-41c3-9162-89b7794706e6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.966211] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.258s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 523.966756] env[61852]: DEBUG nova.compute.manager [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 523.972261] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.255s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 523.974281] env[61852]: INFO nova.compute.claims [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 524.479176] env[61852]: DEBUG nova.compute.utils [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 524.481517] env[61852]: DEBUG nova.compute.manager [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 524.481517] env[61852]: DEBUG nova.network.neutron [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 524.614470] env[61852]: DEBUG nova.policy [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '546e8c33a79b4bad9f8d56d1a5516ef5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '59cd0a465177410ead1ea698f0501c80', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 524.806257] env[61852]: ERROR nova.compute.manager [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2d6684e9-2eca-48a5-9ee0-afd55971a3f9, please check neutron logs for more information. 
[ 524.806257] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 524.806257] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 524.806257] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 524.806257] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 524.806257] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 524.806257] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 524.806257] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 524.806257] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 524.806257] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 524.806257] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 524.806257] env[61852]: ERROR nova.compute.manager raise self.value [ 524.806257] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 524.806257] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 524.806257] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 524.806257] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 524.807220] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 524.807220] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 524.807220] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2d6684e9-2eca-48a5-9ee0-afd55971a3f9, please check neutron logs for more information. 
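Annotation: the traceback above shows how nova surfaces a neutron binding failure. Neutron reports a port it could not bind with binding:vif_type set to binding_failed; nova's _ensure_no_port_binding_failure converts that into PortBindingFailed, and the cleanup in _update_ports_for_instance re-raises the original exception through oslo_utils' save_and_reraise_exception, which produces the force_reraise()/raise self.value frames seen above. A minimal runnable sketch of that pattern, assuming oslo.utils is installed; the PortBindingFailed class here is a simplified stand-in for nova.exception, not nova's code:

    from oslo_utils import excutils


    class PortBindingFailed(Exception):
        """Simplified stand-in for nova.exception.PortBindingFailed."""
        def __init__(self, port_id):
            super().__init__("Binding failed for port %s, please check "
                             "neutron logs for more information." % port_id)


    def _ensure_no_port_binding_failure(port):
        # Neutron marks a failed binding as binding:vif_type='binding_failed'.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port['id'])


    def update_ports_for_instance(ports):
        for port in ports:
            try:
                _ensure_no_port_binding_failure(port)
            except Exception:
                with excutils.save_and_reraise_exception():
                    # Cleanup of ports created so far would run here; on
                    # exit the context manager re-raises the saved
                    # exception, i.e. the force_reraise() frame above.
                    pass


    try:
        update_ports_for_instance(
            [{'id': '2d6684e9-2eca-48a5-9ee0-afd55971a3f9',
              'binding:vif_type': 'binding_failed'}])
    except PortBindingFailed as exc:
        print(exc)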
[ 524.807220] env[61852]: ERROR nova.compute.manager [ 524.807220] env[61852]: Traceback (most recent call last): [ 524.807220] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 524.807220] env[61852]: listener.cb(fileno) [ 524.807220] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 524.807220] env[61852]: result = function(*args, **kwargs) [ 524.807220] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 524.807220] env[61852]: return func(*args, **kwargs) [ 524.807220] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 524.807220] env[61852]: raise e [ 524.807220] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 524.807220] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 524.807220] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 524.807220] env[61852]: created_port_ids = self._update_ports_for_instance( [ 524.807220] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 524.807220] env[61852]: with excutils.save_and_reraise_exception(): [ 524.807220] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 524.807220] env[61852]: self.force_reraise() [ 524.807220] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 524.807220] env[61852]: raise self.value [ 524.807220] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 524.807220] env[61852]: updated_port = self._update_port( [ 524.807220] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 524.807220] env[61852]: _ensure_no_port_binding_failure(port) [ 524.807220] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 524.807220] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 524.807968] env[61852]: nova.exception.PortBindingFailed: Binding failed for port 2d6684e9-2eca-48a5-9ee0-afd55971a3f9, please check neutron logs for more information. [ 524.807968] env[61852]: Removing descriptor: 16 [ 524.808935] env[61852]: ERROR nova.compute.manager [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2d6684e9-2eca-48a5-9ee0-afd55971a3f9, please check neutron logs for more information. 
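Annotation: the instance-scoped traceback that follows explains why the same failure appears twice and only surfaces inside driver.spawn. Network allocation runs in an eventlet greenthread, and network_info is an async wrapper that waits on that greenthread, and re-raises its exception, only when get_vif_info first iterates it (the __iter__ / _sync_wrapper / wait / self._gt.wait() frames below). A toy sketch of that wrapper mechanic, assuming eventlet is installed; AsyncNetworkInfo is an invented class, not nova's NetworkInfoAsyncWrapper:

    import eventlet


    class AsyncNetworkInfo:
        """Toy async wrapper: iteration blocks on the allocation
        greenthread and re-raises whatever it raised."""

        def __init__(self, fn, *args):
            self._gt = eventlet.spawn(fn, *args)
            self._nw_info = None

        def wait(self):
            if self._nw_info is None:
                # GreenThread.wait() re-raises the greenthread's
                # exception here: the self._gt.wait() frame in the log.
                self._nw_info = self._gt.wait()
            return self._nw_info

        def __iter__(self):
            return iter(self.wait())


    def allocate_for_instance(port_id):
        raise RuntimeError("Binding failed for port %s" % port_id)


    nw_info = AsyncNetworkInfo(allocate_for_instance,
                               '2d6684e9-2eca-48a5-9ee0-afd55971a3f9')
    try:
        for vif in nw_info:  # mirrors get_vif_info iterating network_info
            print(vif)
    except RuntimeError as exc:
        print("spawn only sees the failure on iteration:", exc)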
[ 524.808935] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Traceback (most recent call last): [ 524.808935] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 524.808935] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] yield resources [ 524.808935] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 524.808935] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] self.driver.spawn(context, instance, image_meta, [ 524.808935] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 524.808935] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] self._vmops.spawn(context, instance, image_meta, injected_files, [ 524.808935] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 524.808935] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] vm_ref = self.build_virtual_machine(instance, [ 524.808935] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 524.809608] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] vif_infos = vmwarevif.get_vif_info(self._session, [ 524.809608] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 524.809608] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] for vif in network_info: [ 524.809608] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 524.809608] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] return self._sync_wrapper(fn, *args, **kwargs) [ 524.809608] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 524.809608] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] self.wait() [ 524.809608] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 524.809608] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] self[:] = self._gt.wait() [ 524.809608] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 524.809608] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] return self._exit_event.wait() [ 524.809608] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 524.809608] env[61852]: ERROR 
nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] result = hub.switch() [ 524.810143] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 524.810143] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] return self.greenlet.switch() [ 524.810143] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 524.810143] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] result = function(*args, **kwargs) [ 524.810143] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 524.810143] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] return func(*args, **kwargs) [ 524.810143] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 524.810143] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] raise e [ 524.810143] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 524.810143] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] nwinfo = self.network_api.allocate_for_instance( [ 524.810143] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 524.810143] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] created_port_ids = self._update_ports_for_instance( [ 524.810143] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 524.810477] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] with excutils.save_and_reraise_exception(): [ 524.810477] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 524.810477] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] self.force_reraise() [ 524.810477] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 524.810477] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] raise self.value [ 524.810477] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 524.810477] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] updated_port = self._update_port( [ 524.810477] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 524.810477] 
env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] _ensure_no_port_binding_failure(port) [ 524.810477] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 524.810477] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] raise exception.PortBindingFailed(port_id=port['id']) [ 524.810477] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] nova.exception.PortBindingFailed: Binding failed for port 2d6684e9-2eca-48a5-9ee0-afd55971a3f9, please check neutron logs for more information. [ 524.810477] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] [ 524.811393] env[61852]: INFO nova.compute.manager [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Terminating instance [ 524.813807] env[61852]: DEBUG oslo_concurrency.lockutils [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Acquiring lock "refresh_cache-b9f22589-1803-4688-bad1-8be6965d6c92" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 524.817294] env[61852]: DEBUG oslo_concurrency.lockutils [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Acquired lock "refresh_cache-b9f22589-1803-4688-bad1-8be6965d6c92" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 524.817445] env[61852]: DEBUG nova.network.neutron [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 524.984423] env[61852]: DEBUG nova.compute.manager [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 525.091270] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-988436e0-eb89-46b6-adab-1b70086bc93d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.102490] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af010311-6aae-4aea-9275-0fcaed16daf0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.137865] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e73b8384-ee88-4f29-b5b5-4558f83d8ddb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.147057] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2d9efaf-58a0-48a1-b5a0-c1a3620e74d9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.162826] env[61852]: DEBUG nova.compute.provider_tree [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 525.350788] env[61852]: DEBUG nova.compute.manager [req-eb65c1fb-128f-47a7-a0f9-35eda6846117 req-dd363f1b-1593-4e2f-97d8-b751b83170ea service nova] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Received event network-changed-2d6684e9-2eca-48a5-9ee0-afd55971a3f9 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 525.350986] env[61852]: DEBUG nova.compute.manager [req-eb65c1fb-128f-47a7-a0f9-35eda6846117 req-dd363f1b-1593-4e2f-97d8-b751b83170ea service nova] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Refreshing instance network info cache due to event network-changed-2d6684e9-2eca-48a5-9ee0-afd55971a3f9. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 525.351305] env[61852]: DEBUG oslo_concurrency.lockutils [req-eb65c1fb-128f-47a7-a0f9-35eda6846117 req-dd363f1b-1593-4e2f-97d8-b751b83170ea service nova] Acquiring lock "refresh_cache-b9f22589-1803-4688-bad1-8be6965d6c92" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 525.370647] env[61852]: DEBUG nova.network.neutron [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 525.613202] env[61852]: DEBUG nova.network.neutron [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 525.666502] env[61852]: DEBUG nova.scheduler.client.report [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 525.779352] env[61852]: DEBUG nova.network.neutron [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Successfully created port: 50a2ab09-4218-41fd-8374-f1615bf5386a {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 525.997646] env[61852]: DEBUG nova.compute.manager [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 526.024888] env[61852]: DEBUG nova.virt.hardware [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 526.025069] env[61852]: DEBUG nova.virt.hardware [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 526.025264] env[61852]: DEBUG nova.virt.hardware [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 526.025399] env[61852]: DEBUG nova.virt.hardware [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 526.025535] env[61852]: DEBUG nova.virt.hardware [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 526.025680] env[61852]: DEBUG nova.virt.hardware [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 526.025883] env[61852]: DEBUG nova.virt.hardware [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 526.026195] env[61852]: DEBUG nova.virt.hardware [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 
tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 526.026398] env[61852]: DEBUG nova.virt.hardware [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 526.026643] env[61852]: DEBUG nova.virt.hardware [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 526.026752] env[61852]: DEBUG nova.virt.hardware [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 526.027622] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25af4d15-20dc-435a-b854-8c539a91c360 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.037564] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f08ac1b-9530-4cf1-a96f-d9ed3e01a74f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.116578] env[61852]: DEBUG oslo_concurrency.lockutils [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Releasing lock "refresh_cache-b9f22589-1803-4688-bad1-8be6965d6c92" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 526.117028] env[61852]: DEBUG nova.compute.manager [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 526.117234] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 526.117539] env[61852]: DEBUG oslo_concurrency.lockutils [req-eb65c1fb-128f-47a7-a0f9-35eda6846117 req-dd363f1b-1593-4e2f-97d8-b751b83170ea service nova] Acquired lock "refresh_cache-b9f22589-1803-4688-bad1-8be6965d6c92" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 526.117697] env[61852]: DEBUG nova.network.neutron [req-eb65c1fb-128f-47a7-a0f9-35eda6846117 req-dd363f1b-1593-4e2f-97d8-b751b83170ea service nova] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Refreshing network info cache for port 2d6684e9-2eca-48a5-9ee0-afd55971a3f9 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 526.118842] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4a61aecd-7ab2-4828-ab82-a9fde67bb2b8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.130109] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d4cf42-91cb-4b7b-9a43-83fedc393e0b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.155317] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b9f22589-1803-4688-bad1-8be6965d6c92 could not be found. [ 526.155557] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 526.155972] env[61852]: INFO nova.compute.manager [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Took 0.04 seconds to destroy the instance on the hypervisor. [ 526.156318] env[61852]: DEBUG oslo.service.loopingcall [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 526.159630] env[61852]: DEBUG nova.compute.manager [-] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 526.160206] env[61852]: DEBUG nova.network.neutron [-] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 526.177684] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.204s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 526.178270] env[61852]: DEBUG nova.compute.manager [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 526.182305] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.454s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 526.184644] env[61852]: INFO nova.compute.claims [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 526.217806] env[61852]: DEBUG nova.network.neutron [-] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 526.581024] env[61852]: DEBUG oslo_concurrency.lockutils [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Acquiring lock "d75e131b-1933-4e1f-bcf1-62ed83779177" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 526.581024] env[61852]: DEBUG oslo_concurrency.lockutils [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Lock "d75e131b-1933-4e1f-bcf1-62ed83779177" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 526.675715] env[61852]: DEBUG nova.network.neutron [req-eb65c1fb-128f-47a7-a0f9-35eda6846117 req-dd363f1b-1593-4e2f-97d8-b751b83170ea service nova] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 526.684732] env[61852]: DEBUG nova.compute.utils [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 526.686118] env[61852]: DEBUG nova.compute.manager [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 526.687650] env[61852]: DEBUG nova.network.neutron [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 526.721970] env[61852]: DEBUG nova.network.neutron [-] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 526.915919] env[61852]: DEBUG nova.policy [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ed0fe0a1db944aec867b5708d47c70fe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6b94053fbb904520936503b11c5e2129', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 527.047404] env[61852]: ERROR nova.compute.manager [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 255a96e4-ee07-4e40-97c0-76906b0e4e6d, please check neutron logs for more information. 
[ 527.047404] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 527.047404] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 527.047404] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 527.047404] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 527.047404] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 527.047404] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 527.047404] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 527.047404] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 527.047404] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 527.047404] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 527.047404] env[61852]: ERROR nova.compute.manager raise self.value [ 527.047404] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 527.047404] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 527.047404] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 527.047404] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 527.048026] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 527.048026] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 527.048026] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 255a96e4-ee07-4e40-97c0-76906b0e4e6d, please check neutron logs for more information. 
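Annotation: the "after 1 attempt(s)" wording in the error header above reflects a retry loop around the allocation. With a retry count of zero there is exactly one attempt, and on the final attempt the saved exception is re-raised, which is the "raise e" frame at manager.py:2011 in the tracebacks. A hypothetical minimal version of such a loop; the function name, parameters, and defaults here are illustrative, and the real code also emits instance events and logs between attempts:

    import time


    def allocate_network_with_retries(allocate, retries=0, delay=1):
        attempts = retries + 1
        for attempt in range(1, attempts + 1):
            try:
                return allocate()
            except Exception as e:
                if attempt == attempts:
                    print("Instance failed network setup after "
                          "%d attempt(s)" % attempt)
                    raise e  # the final re-raise seen in the traceback
                time.sleep(delay)


    def failing_allocation():
        raise RuntimeError("Binding failed for port "
                           "255a96e4-ee07-4e40-97c0-76906b0e4e6d")


    try:
        allocate_network_with_retries(failing_allocation)
    except RuntimeError as exc:
        print(exc)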
[ 527.048026] env[61852]: ERROR nova.compute.manager [ 527.048026] env[61852]: Traceback (most recent call last): [ 527.048026] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 527.048026] env[61852]: listener.cb(fileno) [ 527.048026] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 527.048026] env[61852]: result = function(*args, **kwargs) [ 527.048026] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 527.048026] env[61852]: return func(*args, **kwargs) [ 527.048026] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 527.048026] env[61852]: raise e [ 527.048026] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 527.048026] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 527.048026] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 527.048026] env[61852]: created_port_ids = self._update_ports_for_instance( [ 527.048026] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 527.048026] env[61852]: with excutils.save_and_reraise_exception(): [ 527.048026] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 527.048026] env[61852]: self.force_reraise() [ 527.048026] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 527.048026] env[61852]: raise self.value [ 527.048026] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 527.048026] env[61852]: updated_port = self._update_port( [ 527.048026] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 527.048026] env[61852]: _ensure_no_port_binding_failure(port) [ 527.048026] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 527.048026] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 527.048877] env[61852]: nova.exception.PortBindingFailed: Binding failed for port 255a96e4-ee07-4e40-97c0-76906b0e4e6d, please check neutron logs for more information. [ 527.048877] env[61852]: Removing descriptor: 17 [ 527.048877] env[61852]: ERROR nova.compute.manager [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 255a96e4-ee07-4e40-97c0-76906b0e4e6d, please check neutron logs for more information. 
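Annotation: after the per-instance traceback below, the records show the same teardown choreography already seen for b9f22589: the instance is terminated and its (empty) network info cache is rebuilt, with both the teardown path and the external "network-changed" event handler serializing on a per-instance "refresh_cache-<uuid>" lock before touching the cache. A hypothetical sketch of that serialization; the lock table and helper are inventions for illustration only:

    import threading
    from collections import defaultdict

    # One lock per instance, keyed like the log's refresh_cache-<uuid> locks.
    _cache_locks = defaultdict(threading.Lock)
    _info_cache = {}


    def refresh_instance_cache(instance_uuid, network_info):
        with _cache_locks['refresh_cache-%s' % instance_uuid]:
            # Both the teardown path and the network-changed event
            # handler funnel through here; since the port never bound,
            # the cache ends up as the empty list seen in "Updating
            # instance_info_cache with network_info: []".
            _info_cache[instance_uuid] = network_info


    refresh_instance_cache('b31d126f-5b63-434c-a2c3-c7dc2f40c80f', [])
    print(_info_cache)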
[ 527.048877] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Traceback (most recent call last): [ 527.048877] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 527.048877] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] yield resources [ 527.048877] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 527.048877] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] self.driver.spawn(context, instance, image_meta, [ 527.048877] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 527.048877] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 527.048877] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 527.048877] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] vm_ref = self.build_virtual_machine(instance, [ 527.049183] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 527.049183] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] vif_infos = vmwarevif.get_vif_info(self._session, [ 527.049183] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 527.049183] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] for vif in network_info: [ 527.049183] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 527.049183] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] return self._sync_wrapper(fn, *args, **kwargs) [ 527.049183] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 527.049183] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] self.wait() [ 527.049183] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 527.049183] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] self[:] = self._gt.wait() [ 527.049183] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 527.049183] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] return self._exit_event.wait() [ 527.049183] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 527.049504] env[61852]: ERROR 
nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] result = hub.switch() [ 527.049504] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 527.049504] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] return self.greenlet.switch() [ 527.049504] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 527.049504] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] result = function(*args, **kwargs) [ 527.049504] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 527.049504] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] return func(*args, **kwargs) [ 527.049504] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 527.049504] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] raise e [ 527.049504] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 527.049504] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] nwinfo = self.network_api.allocate_for_instance( [ 527.049504] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 527.049504] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] created_port_ids = self._update_ports_for_instance( [ 527.049856] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 527.049856] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] with excutils.save_and_reraise_exception(): [ 527.049856] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 527.049856] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] self.force_reraise() [ 527.049856] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 527.049856] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] raise self.value [ 527.049856] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 527.049856] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] updated_port = self._update_port( [ 527.049856] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 527.049856] 
env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] _ensure_no_port_binding_failure(port) [ 527.049856] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 527.049856] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] raise exception.PortBindingFailed(port_id=port['id']) [ 527.050175] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] nova.exception.PortBindingFailed: Binding failed for port 255a96e4-ee07-4e40-97c0-76906b0e4e6d, please check neutron logs for more information. [ 527.050175] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] [ 527.050175] env[61852]: INFO nova.compute.manager [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Terminating instance [ 527.051373] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Acquiring lock "refresh_cache-b31d126f-5b63-434c-a2c3-c7dc2f40c80f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 527.051605] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Acquired lock "refresh_cache-b31d126f-5b63-434c-a2c3-c7dc2f40c80f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 527.051678] env[61852]: DEBUG nova.network.neutron [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 527.058641] env[61852]: DEBUG nova.network.neutron [req-eb65c1fb-128f-47a7-a0f9-35eda6846117 req-dd363f1b-1593-4e2f-97d8-b751b83170ea service nova] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 527.083829] env[61852]: DEBUG nova.compute.manager [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 527.193286] env[61852]: DEBUG nova.compute.manager [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 527.226662] env[61852]: INFO nova.compute.manager [-] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Took 1.07 seconds to deallocate network for instance. 
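Annotation: with the network deallocated, the next records roll back the resource claim taken at scheduling time ("Aborting claim", then abort_instance_claim reacquiring the "compute_resources" lock), so a failed spawn does not leak VCPU or RAM usage on the node. A toy sketch of that claim/abort symmetry using the m1.nano sizes from this log (1 VCPU, 192 MB); the data structures are invented, and nova's tracker additionally reconciles placement allocations:

    import threading

    _compute_resources = threading.Lock()  # the lock named in the records
    _usage = {'VCPU': 0, 'MEMORY_MB': 0}


    def instance_claim(flavor):
        with _compute_resources:
            for rc, amount in flavor.items():
                _usage[rc] += amount
        return dict(flavor)


    def abort_instance_claim(claim):
        with _compute_resources:
            for rc, amount in claim.items():
                _usage[rc] -= amount


    claim = instance_claim({'VCPU': 1, 'MEMORY_MB': 192})  # m1.nano
    abort_instance_claim(claim)  # spawn failed; roll the usage back
    print(_usage)  # back to {'VCPU': 0, 'MEMORY_MB': 0}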
[ 527.232933] env[61852]: DEBUG nova.compute.claims [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 527.233741] env[61852]: DEBUG oslo_concurrency.lockutils [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 527.349240] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29d592ad-493d-4df7-8e1a-cb79aa952d7f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.357204] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5fcd68e-f2d6-4ee4-97c7-ef8627dc3bfc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.390290] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-230cf616-c8b8-4284-a7dd-613fa7fe4dee {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.397735] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b534788b-5d1a-46a0-adce-c24f8d48566f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.412669] env[61852]: DEBUG nova.compute.provider_tree [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 527.562647] env[61852]: DEBUG oslo_concurrency.lockutils [req-eb65c1fb-128f-47a7-a0f9-35eda6846117 req-dd363f1b-1593-4e2f-97d8-b751b83170ea service nova] Releasing lock "refresh_cache-b9f22589-1803-4688-bad1-8be6965d6c92" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 527.606692] env[61852]: DEBUG nova.network.neutron [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 527.623420] env[61852]: DEBUG oslo_concurrency.lockutils [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 527.818348] env[61852]: DEBUG nova.network.neutron [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 527.920489] env[61852]: DEBUG nova.scheduler.client.report [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 528.003088] env[61852]: DEBUG nova.network.neutron [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Successfully created port: 9d81a11d-ffc6-477b-8822-d2ca7437b877 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 528.208563] env[61852]: DEBUG nova.compute.manager [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 528.241242] env[61852]: DEBUG nova.virt.hardware [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 528.241538] env[61852]: DEBUG nova.virt.hardware [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 528.241692] env[61852]: DEBUG nova.virt.hardware [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 528.241934] env[61852]: DEBUG nova.virt.hardware [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 528.244717] env[61852]: DEBUG nova.virt.hardware [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 528.244894] env[61852]: DEBUG nova.virt.hardware [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 528.245111] env[61852]: DEBUG nova.virt.hardware [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 528.245272] env[61852]: DEBUG nova.virt.hardware [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 528.245433] env[61852]: DEBUG nova.virt.hardware [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 528.245589] env[61852]: DEBUG nova.virt.hardware [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 528.245755] env[61852]: DEBUG nova.virt.hardware [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 528.246650] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d21d929-0e5a-46d4-8912-8afb935eb11a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.255596] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48e1fa5f-b72b-4a68-9f64-251be22dca98 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.330889] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Releasing lock "refresh_cache-b31d126f-5b63-434c-a2c3-c7dc2f40c80f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 528.330889] env[61852]: DEBUG nova.compute.manager [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 528.330889] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 528.331081] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-585423b3-dbf3-41fc-8d6e-1d3a409a4314 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.339668] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e06ddfe2-a84a-4eb8-bac6-cf4eebc326af {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.360119] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b31d126f-5b63-434c-a2c3-c7dc2f40c80f could not be found. [ 528.360363] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 528.360544] env[61852]: INFO nova.compute.manager [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Took 0.03 seconds to destroy the instance on the hypervisor. [ 528.360780] env[61852]: DEBUG oslo.service.loopingcall [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 528.360975] env[61852]: DEBUG nova.compute.manager [-] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 528.361080] env[61852]: DEBUG nova.network.neutron [-] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 528.419286] env[61852]: DEBUG nova.network.neutron [-] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 528.429111] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.243s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 528.429111] env[61852]: DEBUG nova.compute.manager [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 528.433041] env[61852]: DEBUG oslo_concurrency.lockutils [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.280s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 528.433041] env[61852]: INFO nova.compute.claims [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 528.610154] env[61852]: DEBUG nova.compute.manager [req-63a217eb-e23c-412f-b732-811f58a80288 req-b482f157-fcc9-45f7-bba5-dbc5ef4b759b service nova] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Received event network-vif-deleted-2d6684e9-2eca-48a5-9ee0-afd55971a3f9 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 528.610366] env[61852]: DEBUG nova.compute.manager [req-63a217eb-e23c-412f-b732-811f58a80288 req-b482f157-fcc9-45f7-bba5-dbc5ef4b759b service nova] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Received event network-changed-255a96e4-ee07-4e40-97c0-76906b0e4e6d {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 528.610525] env[61852]: DEBUG nova.compute.manager [req-63a217eb-e23c-412f-b732-811f58a80288 req-b482f157-fcc9-45f7-bba5-dbc5ef4b759b service nova] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Refreshing instance network info cache due to event network-changed-255a96e4-ee07-4e40-97c0-76906b0e4e6d. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 528.610729] env[61852]: DEBUG oslo_concurrency.lockutils [req-63a217eb-e23c-412f-b732-811f58a80288 req-b482f157-fcc9-45f7-bba5-dbc5ef4b759b service nova] Acquiring lock "refresh_cache-b31d126f-5b63-434c-a2c3-c7dc2f40c80f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 528.610874] env[61852]: DEBUG oslo_concurrency.lockutils [req-63a217eb-e23c-412f-b732-811f58a80288 req-b482f157-fcc9-45f7-bba5-dbc5ef4b759b service nova] Acquired lock "refresh_cache-b31d126f-5b63-434c-a2c3-c7dc2f40c80f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 528.611653] env[61852]: DEBUG nova.network.neutron [req-63a217eb-e23c-412f-b732-811f58a80288 req-b482f157-fcc9-45f7-bba5-dbc5ef4b759b service nova] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Refreshing network info cache for port 255a96e4-ee07-4e40-97c0-76906b0e4e6d {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 528.708687] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 528.895301] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Acquiring lock "f112b2be-fbd7-4a01-b369-25fe490e4204" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 528.895301] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Lock "f112b2be-fbd7-4a01-b369-25fe490e4204" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 528.921428] env[61852]: DEBUG nova.network.neutron [-] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 528.940540] env[61852]: DEBUG nova.compute.utils [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 528.948018] env[61852]: DEBUG nova.compute.manager [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 528.948247] env[61852]: DEBUG nova.network.neutron [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 529.186580] env[61852]: DEBUG nova.network.neutron [req-63a217eb-e23c-412f-b732-811f58a80288 req-b482f157-fcc9-45f7-bba5-dbc5ef4b759b service nova] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 529.214026] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Getting list of instances from cluster (obj){ [ 529.214026] env[61852]: value = "domain-c8" [ 529.214026] env[61852]: _type = "ClusterComputeResource" [ 529.214026] env[61852]: } {{(pid=61852) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 529.216640] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01c3e54d-8c19-4419-b05b-21aba0b9c4f4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.228763] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Got total of 0 instances {{(pid=61852) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 529.228870] env[61852]: WARNING nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] While synchronizing instance power states, found 6 instances in the database and 0 instances on the hypervisor. 
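The warning above is produced by the ComputeManager._sync_power_states periodic task: the database reports six instances while the preceding cluster query ("Got total of 0 instances") found none, so a per-UUID sync is queued, which the "Triggering sync" and "Acquiring lock" records that follow reflect. A rough stdlib-only sketch of that reconciliation shape; the function and variable names here are illustrative, not Nova's internals.

    import logging

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger("sync_power_states_sketch")

    def sync_power_states(db_uuids, hypervisor_uuids):
        # A count mismatch is only a WARNING: instances may still be
        # building, or may have failed before reaching the hypervisor.
        if len(db_uuids) != len(hypervisor_uuids):
            LOG.warning("While synchronizing instance power states, found "
                        "%d instances in the database and %d instances on "
                        "the hypervisor.",
                        len(db_uuids), len(hypervisor_uuids))
        for uuid in db_uuids:
            # Nova queues one sync task per instance, each guarded by a
            # per-UUID lock (the "Acquiring lock ..." records below).
            LOG.debug("Triggering sync for uuid %s", uuid)

    sync_power_states(
        db_uuids=["b9f22589-1803-4688-bad1-8be6965d6c92",
                  "b31d126f-5b63-434c-a2c3-c7dc2f40c80f"],
        hypervisor_uuids=[])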
[ 529.229042] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Triggering sync for uuid b9f22589-1803-4688-bad1-8be6965d6c92 {{(pid=61852) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 529.229328] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Triggering sync for uuid b31d126f-5b63-434c-a2c3-c7dc2f40c80f {{(pid=61852) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 529.229406] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Triggering sync for uuid bb04c866-2e19-48e9-9aa5-89af0e56d735 {{(pid=61852) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 529.229498] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Triggering sync for uuid 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc {{(pid=61852) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 529.229648] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Triggering sync for uuid e377c443-91b5-4d99-a0e8-a9731421a39e {{(pid=61852) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 529.229936] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Triggering sync for uuid 8084d5e2-454b-4003-a9e8-b733fd0322a3 {{(pid=61852) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 529.231804] env[61852]: DEBUG nova.policy [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '38ef9094e67a42d193ac8bf04ed81f54', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e75cfa60e8a9488687aad0e2e9a39df5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 529.233419] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "b9f22589-1803-4688-bad1-8be6965d6c92" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.233882] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "b31d126f-5b63-434c-a2c3-c7dc2f40c80f" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.233882] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "bb04c866-2e19-48e9-9aa5-89af0e56d735" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.234517] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock 
"62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.234829] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "e377c443-91b5-4d99-a0e8-a9731421a39e" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.235033] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "8084d5e2-454b-4003-a9e8-b733fd0322a3" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.235181] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 529.235520] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61852) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 529.235562] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 529.293032] env[61852]: ERROR nova.compute.manager [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 50a2ab09-4218-41fd-8374-f1615bf5386a, please check neutron logs for more information. 
[ 529.293032] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 529.293032] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 529.293032] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 529.293032] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 529.293032] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 529.293032] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 529.293032] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 529.293032] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 529.293032] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 529.293032] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 529.293032] env[61852]: ERROR nova.compute.manager raise self.value [ 529.293032] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 529.293032] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 529.293032] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 529.293032] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 529.293540] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 529.293540] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 529.293540] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 50a2ab09-4218-41fd-8374-f1615bf5386a, please check neutron logs for more information. 
[ 529.293540] env[61852]: ERROR nova.compute.manager [ 529.293540] env[61852]: Traceback (most recent call last): [ 529.293540] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 529.293540] env[61852]: listener.cb(fileno) [ 529.293540] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 529.293540] env[61852]: result = function(*args, **kwargs) [ 529.293540] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 529.293540] env[61852]: return func(*args, **kwargs) [ 529.293540] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 529.293540] env[61852]: raise e [ 529.293540] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 529.293540] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 529.293540] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 529.293540] env[61852]: created_port_ids = self._update_ports_for_instance( [ 529.293540] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 529.293540] env[61852]: with excutils.save_and_reraise_exception(): [ 529.293540] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 529.293540] env[61852]: self.force_reraise() [ 529.293540] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 529.293540] env[61852]: raise self.value [ 529.293540] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 529.293540] env[61852]: updated_port = self._update_port( [ 529.293540] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 529.293540] env[61852]: _ensure_no_port_binding_failure(port) [ 529.293540] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 529.293540] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 529.294318] env[61852]: nova.exception.PortBindingFailed: Binding failed for port 50a2ab09-4218-41fd-8374-f1615bf5386a, please check neutron logs for more information. [ 529.294318] env[61852]: Removing descriptor: 18 [ 529.294318] env[61852]: ERROR nova.compute.manager [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 50a2ab09-4218-41fd-8374-f1615bf5386a, please check neutron logs for more information. 
[ 529.294318] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Traceback (most recent call last): [ 529.294318] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 529.294318] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] yield resources [ 529.294318] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 529.294318] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] self.driver.spawn(context, instance, image_meta, [ 529.294318] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 529.294318] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] self._vmops.spawn(context, instance, image_meta, injected_files, [ 529.294318] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 529.294318] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] vm_ref = self.build_virtual_machine(instance, [ 529.295533] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 529.295533] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] vif_infos = vmwarevif.get_vif_info(self._session, [ 529.295533] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 529.295533] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] for vif in network_info: [ 529.295533] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 529.295533] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] return self._sync_wrapper(fn, *args, **kwargs) [ 529.295533] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 529.295533] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] self.wait() [ 529.295533] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 529.295533] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] self[:] = self._gt.wait() [ 529.295533] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 529.295533] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] return self._exit_event.wait() [ 529.295533] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 529.295892] env[61852]: ERROR 
nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] result = hub.switch() [ 529.295892] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 529.295892] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] return self.greenlet.switch() [ 529.295892] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 529.295892] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] result = function(*args, **kwargs) [ 529.295892] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 529.295892] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] return func(*args, **kwargs) [ 529.295892] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 529.295892] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] raise e [ 529.295892] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 529.295892] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] nwinfo = self.network_api.allocate_for_instance( [ 529.295892] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 529.295892] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] created_port_ids = self._update_ports_for_instance( [ 529.296257] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 529.296257] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] with excutils.save_and_reraise_exception(): [ 529.296257] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 529.296257] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] self.force_reraise() [ 529.296257] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 529.296257] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] raise self.value [ 529.296257] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 529.296257] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] updated_port = self._update_port( [ 529.296257] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 529.296257] 
env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] _ensure_no_port_binding_failure(port) [ 529.296257] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 529.296257] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] raise exception.PortBindingFailed(port_id=port['id']) [ 529.296640] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] nova.exception.PortBindingFailed: Binding failed for port 50a2ab09-4218-41fd-8374-f1615bf5386a, please check neutron logs for more information. [ 529.296640] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] [ 529.296640] env[61852]: INFO nova.compute.manager [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Terminating instance [ 529.297540] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Acquiring lock "refresh_cache-bb04c866-2e19-48e9-9aa5-89af0e56d735" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 529.297781] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Acquired lock "refresh_cache-bb04c866-2e19-48e9-9aa5-89af0e56d735" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 529.298108] env[61852]: DEBUG nova.network.neutron [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 529.398805] env[61852]: DEBUG nova.compute.manager [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 529.423674] env[61852]: INFO nova.compute.manager [-] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Took 1.06 seconds to deallocate network for instance. 
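Both PortBindingFailed tracebacks pass through excutils.save_and_reraise_exception() (the oslo_utils/excutils.py frames at lines 227 and 200 above): _update_ports_for_instance catches the failure, runs its cleanup inside the context manager, and force_reraise() then propagates the original exception to the caller. Below is a stdlib-only analogue of that pattern, reduced to just the re-raise behaviour; the demo function is illustrative, not Nova's code.

    import sys
    from contextlib import contextmanager

    @contextmanager
    def save_and_reraise_exception():
        # Capture the exception currently being handled, let the caller's
        # cleanup body run, then re-raise the original (the "raise
        # self.value" frame above). If the cleanup body itself raises,
        # that newer exception propagates instead, as in oslo_utils.
        exc = sys.exc_info()[1]
        yield
        if exc is not None:
            raise exc

    class PortBindingFailed(Exception):
        pass

    def update_ports_for_instance():
        try:
            raise PortBindingFailed("port 50a2ab09")  # stand-in failure
        except Exception:
            with save_and_reraise_exception():
                print("cleanup: roll back ports created so far")

    try:
        update_ports_for_instance()
    except PortBindingFailed as e:
        # The original exception survives the cleanup, as in the log.
        print("re-raised:", e)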
[ 529.429158] env[61852]: DEBUG nova.compute.claims [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 529.429840] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.448963] env[61852]: DEBUG nova.compute.manager [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 529.467568] env[61852]: DEBUG nova.network.neutron [req-63a217eb-e23c-412f-b732-811f58a80288 req-b482f157-fcc9-45f7-bba5-dbc5ef4b759b service nova] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 529.738564] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.740457] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c09cc7a-0104-40e9-8525-23df89c6f4e8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.750320] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-750c161c-a1bf-4830-851d-8c83f0c36acb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.786898] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ffaa3f-1bec-4b8c-8bf2-2b3ed9e0f8c8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.795693] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-046c1989-b71c-40bd-90b3-aec925b4b299 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.811956] env[61852]: DEBUG nova.compute.provider_tree [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 529.870107] env[61852]: DEBUG nova.network.neutron [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Instance 
cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 529.931522] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.970840] env[61852]: DEBUG oslo_concurrency.lockutils [req-63a217eb-e23c-412f-b732-811f58a80288 req-b482f157-fcc9-45f7-bba5-dbc5ef4b759b service nova] Releasing lock "refresh_cache-b31d126f-5b63-434c-a2c3-c7dc2f40c80f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 529.971290] env[61852]: DEBUG nova.compute.manager [req-63a217eb-e23c-412f-b732-811f58a80288 req-b482f157-fcc9-45f7-bba5-dbc5ef4b759b service nova] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Received event network-vif-deleted-255a96e4-ee07-4e40-97c0-76906b0e4e6d {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 530.248791] env[61852]: DEBUG nova.network.neutron [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 530.315665] env[61852]: DEBUG nova.scheduler.client.report [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 530.461404] env[61852]: DEBUG nova.compute.manager [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 530.493434] env[61852]: DEBUG nova.virt.hardware [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 530.493917] env[61852]: DEBUG nova.virt.hardware [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 530.494878] env[61852]: DEBUG nova.virt.hardware [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 530.494878] env[61852]: DEBUG nova.virt.hardware [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 530.495049] env[61852]: DEBUG nova.virt.hardware [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 530.495184] env[61852]: DEBUG nova.virt.hardware [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 530.495545] env[61852]: DEBUG nova.virt.hardware [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 530.495774] env[61852]: DEBUG nova.virt.hardware [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 530.496104] env[61852]: DEBUG nova.virt.hardware [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 530.496341] env[61852]: DEBUG nova.virt.hardware [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 530.496591] env[61852]: DEBUG nova.virt.hardware [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 530.497691] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6cfb1dd-c364-4d32-b39d-6be4249bbbad {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.508835] env[61852]: DEBUG nova.compute.manager [req-335dc931-adbf-45b7-8521-35a91c760bc7 req-52e4f0bb-6c50-42e3-b6a2-69dd54a01ce7 service nova] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Received event network-changed-50a2ab09-4218-41fd-8374-f1615bf5386a {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 530.509043] env[61852]: DEBUG nova.compute.manager [req-335dc931-adbf-45b7-8521-35a91c760bc7 req-52e4f0bb-6c50-42e3-b6a2-69dd54a01ce7 service nova] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Refreshing instance network info cache due to event network-changed-50a2ab09-4218-41fd-8374-f1615bf5386a. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 530.509324] env[61852]: DEBUG oslo_concurrency.lockutils [req-335dc931-adbf-45b7-8521-35a91c760bc7 req-52e4f0bb-6c50-42e3-b6a2-69dd54a01ce7 service nova] Acquiring lock "refresh_cache-bb04c866-2e19-48e9-9aa5-89af0e56d735" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 530.517661] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9fb2c0c-3cfd-4d21-a970-d6d40a137dc0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.757705] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Releasing lock "refresh_cache-bb04c866-2e19-48e9-9aa5-89af0e56d735" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 530.758143] env[61852]: DEBUG nova.compute.manager [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 530.758573] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 530.763080] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "eea17bb5-01e3-4144-a579-2a56be8154c4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 530.763358] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "eea17bb5-01e3-4144-a579-2a56be8154c4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.004s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 530.765483] env[61852]: DEBUG oslo_concurrency.lockutils [req-335dc931-adbf-45b7-8521-35a91c760bc7 req-52e4f0bb-6c50-42e3-b6a2-69dd54a01ce7 service nova] Acquired lock "refresh_cache-bb04c866-2e19-48e9-9aa5-89af0e56d735" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 530.765748] env[61852]: DEBUG nova.network.neutron [req-335dc931-adbf-45b7-8521-35a91c760bc7 req-52e4f0bb-6c50-42e3-b6a2-69dd54a01ce7 service nova] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Refreshing network info cache for port 50a2ab09-4218-41fd-8374-f1615bf5386a {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 530.766615] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-340474dc-7de0-4a27-8a23-9389b60f0162 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.779957] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d4b5475-177f-4914-bc15-2949090af88d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.796862] env[61852]: DEBUG oslo_concurrency.lockutils [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Acquiring lock "c0d84943-8398-401d-ac7b-f4436bb8325f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 530.797565] env[61852]: DEBUG oslo_concurrency.lockutils [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Lock "c0d84943-8398-401d-ac7b-f4436bb8325f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61852) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 530.807549] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bb04c866-2e19-48e9-9aa5-89af0e56d735 could not be found. [ 530.807549] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 530.807729] env[61852]: INFO nova.compute.manager [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Took 0.05 seconds to destroy the instance on the hypervisor. [ 530.808844] env[61852]: DEBUG oslo.service.loopingcall [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 530.808844] env[61852]: DEBUG nova.compute.manager [-] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 530.808844] env[61852]: DEBUG nova.network.neutron [-] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 530.826669] env[61852]: DEBUG oslo_concurrency.lockutils [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.395s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 530.827181] env[61852]: DEBUG nova.compute.manager [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 530.831608] env[61852]: DEBUG oslo_concurrency.lockutils [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 3.598s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 530.845749] env[61852]: DEBUG nova.network.neutron [-] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 530.897595] env[61852]: DEBUG nova.network.neutron [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Successfully created port: e413caaf-18fa-4232-9c03-9dae4bc25546 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 531.270437] env[61852]: DEBUG nova.compute.manager [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 531.303174] env[61852]: DEBUG nova.compute.manager [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 531.309799] env[61852]: DEBUG nova.network.neutron [req-335dc931-adbf-45b7-8521-35a91c760bc7 req-52e4f0bb-6c50-42e3-b6a2-69dd54a01ce7 service nova] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 531.345318] env[61852]: DEBUG nova.compute.utils [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 531.345781] env[61852]: DEBUG nova.compute.manager [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 531.346824] env[61852]: DEBUG nova.network.neutron [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 531.350022] env[61852]: DEBUG nova.network.neutron [-] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 531.587362] env[61852]: DEBUG nova.network.neutron [req-335dc931-adbf-45b7-8521-35a91c760bc7 req-52e4f0bb-6c50-42e3-b6a2-69dd54a01ce7 service nova] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 531.626705] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03ea686-fd9d-4be8-b3bb-8a67383baf1e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.637503] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebd99ca1-de98-48f7-a862-8eb709d10411 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.676672] env[61852]: DEBUG nova.policy [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cf85bc0f1542438184bb61f0f2d5b6f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1aa9a161d18249abb6da9ec11c0efde0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 531.679023] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-740c4f82-c919-493d-9996-b74cb0a4eafb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.688320] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51f24246-1075-4919-b99c-df2884d41a7b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.708110] env[61852]: DEBUG nova.compute.provider_tree [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 531.808133] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "compute_resources" by
"nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 531.833559] env[61852]: DEBUG oslo_concurrency.lockutils [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 531.849129] env[61852]: DEBUG nova.compute.manager [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 531.857613] env[61852]: INFO nova.compute.manager [-] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Took 1.05 seconds to deallocate network for instance. [ 531.860627] env[61852]: DEBUG nova.compute.claims [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 531.860903] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.090542] env[61852]: DEBUG oslo_concurrency.lockutils [req-335dc931-adbf-45b7-8521-35a91c760bc7 req-52e4f0bb-6c50-42e3-b6a2-69dd54a01ce7 service nova] Releasing lock "refresh_cache-bb04c866-2e19-48e9-9aa5-89af0e56d735" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 532.213549] env[61852]: DEBUG nova.scheduler.client.report [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 532.518014] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Acquiring lock "a4736ffe-ad02-444d-bb6e-2cf4f70d64ee" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.518014] env[61852]: DEBUG
oslo_concurrency.lockutils [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Lock "a4736ffe-ad02-444d-bb6e-2cf4f70d64ee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 532.710540] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Acquiring lock "d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.710764] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Lock "d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 532.726815] env[61852]: DEBUG oslo_concurrency.lockutils [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.895s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 532.727470] env[61852]: ERROR nova.compute.manager [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2d6684e9-2eca-48a5-9ee0-afd55971a3f9, please check neutron logs for more information.
[ 532.727470] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Traceback (most recent call last): [ 532.727470] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 532.727470] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] self.driver.spawn(context, instance, image_meta, [ 532.727470] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 532.727470] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] self._vmops.spawn(context, instance, image_meta, injected_files, [ 532.727470] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 532.727470] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] vm_ref = self.build_virtual_machine(instance, [ 532.727470] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 532.727470] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] vif_infos = vmwarevif.get_vif_info(self._session, [ 532.727470] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 532.728724] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] for vif in network_info: [ 532.728724] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 532.728724] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] return self._sync_wrapper(fn, *args, **kwargs) [ 532.728724] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 532.728724] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] self.wait() [ 532.728724] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 532.728724] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] self[:] = self._gt.wait() [ 532.728724] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 532.728724] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] return self._exit_event.wait() [ 532.728724] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 532.728724] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] result = hub.switch() [ 532.728724] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
532.728724] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] return self.greenlet.switch() [ 532.729109] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 532.729109] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] result = function(*args, **kwargs) [ 532.729109] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 532.729109] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] return func(*args, **kwargs) [ 532.729109] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 532.729109] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] raise e [ 532.729109] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 532.729109] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] nwinfo = self.network_api.allocate_for_instance( [ 532.729109] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 532.729109] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] created_port_ids = self._update_ports_for_instance( [ 532.729109] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 532.729109] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] with excutils.save_and_reraise_exception(): [ 532.729109] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 532.729552] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] self.force_reraise() [ 532.729552] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 532.729552] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] raise self.value [ 532.729552] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 532.729552] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] updated_port = self._update_port( [ 532.729552] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 532.729552] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] _ensure_no_port_binding_failure(port) [ 532.729552] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 532.729552] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] raise exception.PortBindingFailed(port_id=port['id']) [ 532.729552] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] nova.exception.PortBindingFailed: Binding failed for port 2d6684e9-2eca-48a5-9ee0-afd55971a3f9, please check neutron logs for more information. [ 532.729552] env[61852]: ERROR nova.compute.manager [instance: b9f22589-1803-4688-bad1-8be6965d6c92] [ 532.729851] env[61852]: DEBUG nova.compute.utils [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Binding failed for port 2d6684e9-2eca-48a5-9ee0-afd55971a3f9, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 532.730130] env[61852]: DEBUG oslo_concurrency.lockutils [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.107s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 532.731685] env[61852]: INFO nova.compute.claims [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 532.736031] env[61852]: DEBUG nova.compute.manager [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Build of instance b9f22589-1803-4688-bad1-8be6965d6c92 was re-scheduled: Binding failed for port 2d6684e9-2eca-48a5-9ee0-afd55971a3f9, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 532.736031] env[61852]: DEBUG nova.compute.manager [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 532.736198] env[61852]: DEBUG oslo_concurrency.lockutils [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Acquiring lock "refresh_cache-b9f22589-1803-4688-bad1-8be6965d6c92" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 532.736305] env[61852]: DEBUG oslo_concurrency.lockutils [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Acquired lock "refresh_cache-b9f22589-1803-4688-bad1-8be6965d6c92" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 532.736526] env[61852]: DEBUG nova.network.neutron [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 532.754685] env[61852]: ERROR nova.compute.manager [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9d81a11d-ffc6-477b-8822-d2ca7437b877, please check neutron logs for more information. 
[ 532.754685] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 532.754685] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 532.754685] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 532.754685] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 532.754685] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 532.754685] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 532.754685] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 532.754685] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 532.754685] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 532.754685] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 532.754685] env[61852]: ERROR nova.compute.manager raise self.value [ 532.754685] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 532.754685] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 532.754685] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 532.754685] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 532.755137] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 532.755137] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 532.755137] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9d81a11d-ffc6-477b-8822-d2ca7437b877, please check neutron logs for more information. 
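Both PortBindingFailed tracebacks above terminate in _ensure_no_port_binding_failure (nova/network/neutron.py, line 294 in this build): after updating a port, Nova inspects the binding that Neutron returned and raises if the vif_type shows the binding failed, which is why the message points at the Neutron logs. A simplified, self-contained sketch of that check, using a stand-in exception class rather than the real nova.exception module:

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f'Binding failed for port {port_id}, '
                             'please check neutron logs for more information.')

    def _ensure_no_port_binding_failure(port):
        # Neutron signals an unbindable port by setting binding:vif_type
        # to 'binding_failed' on the port rather than returning an error.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    try:
        _ensure_no_port_binding_failure(
            {'id': 'example-port-id', 'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)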
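The save_and_reraise_exception/force_reraise frames in every traceback come from oslo_utils.excutils: a context manager used inside an except block so that cleanup can run and the original exception is then re-raised for the caller, with the original traceback intact. A minimal runnable illustration; the failing function is a stand-in for the Neutron port update:

    from oslo_utils import excutils

    def update_port():
        raise ValueError('port update failed')  # stand-in for the real error

    try:
        try:
            update_port()
        except Exception:
            with excutils.save_and_reraise_exception():
                print('cleanup runs here before the original error propagates')
    except ValueError as exc:
        print('caller still sees:', exc)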
[ 532.755137] env[61852]: ERROR nova.compute.manager [ 532.755137] env[61852]: Traceback (most recent call last): [ 532.755137] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 532.755137] env[61852]: listener.cb(fileno) [ 532.755137] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 532.755137] env[61852]: result = function(*args, **kwargs) [ 532.755137] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 532.755137] env[61852]: return func(*args, **kwargs) [ 532.755137] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 532.755137] env[61852]: raise e [ 532.755137] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 532.755137] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 532.755137] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 532.755137] env[61852]: created_port_ids = self._update_ports_for_instance( [ 532.755137] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 532.755137] env[61852]: with excutils.save_and_reraise_exception(): [ 532.755137] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 532.755137] env[61852]: self.force_reraise() [ 532.755137] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 532.755137] env[61852]: raise self.value [ 532.755137] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 532.755137] env[61852]: updated_port = self._update_port( [ 532.755137] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 532.755137] env[61852]: _ensure_no_port_binding_failure(port) [ 532.755137] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 532.755137] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 532.756071] env[61852]: nova.exception.PortBindingFailed: Binding failed for port 9d81a11d-ffc6-477b-8822-d2ca7437b877, please check neutron logs for more information. [ 532.756071] env[61852]: Removing descriptor: 19 [ 532.756071] env[61852]: ERROR nova.compute.manager [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9d81a11d-ffc6-477b-8822-d2ca7437b877, please check neutron logs for more information. 
[ 532.756071] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Traceback (most recent call last): [ 532.756071] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 532.756071] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] yield resources [ 532.756071] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 532.756071] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] self.driver.spawn(context, instance, image_meta, [ 532.756071] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 532.756071] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 532.756071] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 532.756071] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] vm_ref = self.build_virtual_machine(instance, [ 532.756417] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 532.756417] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] vif_infos = vmwarevif.get_vif_info(self._session, [ 532.756417] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 532.756417] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] for vif in network_info: [ 532.756417] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 532.756417] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] return self._sync_wrapper(fn, *args, **kwargs) [ 532.756417] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 532.756417] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] self.wait() [ 532.756417] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 532.756417] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] self[:] = self._gt.wait() [ 532.756417] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 532.756417] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] return self._exit_event.wait() [ 532.756417] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 532.756812] env[61852]: ERROR 
nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] result = hub.switch() [ 532.756812] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 532.756812] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] return self.greenlet.switch() [ 532.756812] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 532.756812] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] result = function(*args, **kwargs) [ 532.756812] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 532.756812] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] return func(*args, **kwargs) [ 532.756812] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 532.756812] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] raise e [ 532.756812] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 532.756812] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] nwinfo = self.network_api.allocate_for_instance( [ 532.756812] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 532.756812] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] created_port_ids = self._update_ports_for_instance( [ 532.757296] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 532.757296] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] with excutils.save_and_reraise_exception(): [ 532.757296] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 532.757296] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] self.force_reraise() [ 532.757296] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 532.757296] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] raise self.value [ 532.757296] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 532.757296] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] updated_port = self._update_port( [ 532.757296] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 532.757296] 
env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] _ensure_no_port_binding_failure(port) [ 532.757296] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 532.757296] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] raise exception.PortBindingFailed(port_id=port['id']) [ 532.757629] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] nova.exception.PortBindingFailed: Binding failed for port 9d81a11d-ffc6-477b-8822-d2ca7437b877, please check neutron logs for more information. [ 532.757629] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] [ 532.757629] env[61852]: INFO nova.compute.manager [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Terminating instance [ 532.760963] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Acquiring lock "refresh_cache-62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 532.760963] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Acquired lock "refresh_cache-62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 532.760963] env[61852]: DEBUG nova.network.neutron [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 532.865195] env[61852]: DEBUG nova.compute.manager [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 532.918469] env[61852]: DEBUG nova.virt.hardware [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 532.918730] env[61852]: DEBUG nova.virt.hardware [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 532.918901] env[61852]: DEBUG nova.virt.hardware [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 532.923023] env[61852]: DEBUG nova.virt.hardware [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 532.923023] env[61852]: DEBUG nova.virt.hardware [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 532.923023] env[61852]: DEBUG nova.virt.hardware [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 532.923023] env[61852]: DEBUG nova.virt.hardware [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 532.923023] env[61852]: DEBUG nova.virt.hardware [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 532.923434] env[61852]: DEBUG nova.virt.hardware [None 
req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 532.923434] env[61852]: DEBUG nova.virt.hardware [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 532.923434] env[61852]: DEBUG nova.virt.hardware [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 532.923434] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c64df309-6041-48f1-8b78-5089e67b91bb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.930779] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca8f96ba-ab46-4c0c-b52c-7ff13f70c575 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.062059] env[61852]: DEBUG nova.network.neutron [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Successfully created port: de358c72-6a90-452b-8bba-7f78a88b4b17 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 533.283229] env[61852]: DEBUG nova.network.neutron [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 533.346917] env[61852]: DEBUG nova.network.neutron [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 533.498424] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Acquiring lock "57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.498935] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Lock "57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 533.511218] env[61852]: DEBUG nova.compute.manager [req-cfd7cf3c-e196-4e2a-9795-351297ab8fc0 req-96509dd5-6faa-428b-a98e-4ea584b39fcd service nova] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Received event network-changed-9d81a11d-ffc6-477b-8822-d2ca7437b877 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 533.511218] env[61852]: DEBUG nova.compute.manager [req-cfd7cf3c-e196-4e2a-9795-351297ab8fc0 req-96509dd5-6faa-428b-a98e-4ea584b39fcd service nova] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Refreshing instance network info cache due to event network-changed-9d81a11d-ffc6-477b-8822-d2ca7437b877. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 533.511218] env[61852]: DEBUG oslo_concurrency.lockutils [req-cfd7cf3c-e196-4e2a-9795-351297ab8fc0 req-96509dd5-6faa-428b-a98e-4ea584b39fcd service nova] Acquiring lock "refresh_cache-62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 533.646188] env[61852]: DEBUG nova.network.neutron [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 533.755802] env[61852]: DEBUG nova.network.neutron [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 534.003924] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-473bd827-8251-467a-84f7-7afefece1673 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.013640] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3e73b30-30cc-41d9-9386-b06104d36c81 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.046887] env[61852]: DEBUG oslo_vmware.service [-] Invoking
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674a9bc2-330d-49aa-9724-80913203fe46 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.055489] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69940c5f-174e-4b31-ad46-0189007e59d7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.070928] env[61852]: DEBUG nova.compute.provider_tree [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 534.153764] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Releasing lock "refresh_cache-62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 534.154240] env[61852]: DEBUG nova.compute.manager [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 534.154435] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 534.154727] env[61852]: DEBUG oslo_concurrency.lockutils [req-cfd7cf3c-e196-4e2a-9795-351297ab8fc0 req-96509dd5-6faa-428b-a98e-4ea584b39fcd service nova] Acquired lock "refresh_cache-62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 534.154898] env[61852]: DEBUG nova.network.neutron [req-cfd7cf3c-e196-4e2a-9795-351297ab8fc0 req-96509dd5-6faa-428b-a98e-4ea584b39fcd service nova] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Refreshing network info cache for port 9d81a11d-ffc6-477b-8822-d2ca7437b877 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 534.156043] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d079397d-0663-4d4b-a33e-dd6b3a3dd88e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.167684] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc110c65-0122-4f90-8ea5-0578305db4d9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.193748] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Instance does not exist on backend: 
nova.exception.InstanceNotFound: Instance 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc could not be found. [ 534.194543] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 534.194869] env[61852]: INFO nova.compute.manager [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Took 0.04 seconds to destroy the instance on the hypervisor. [ 534.195230] env[61852]: DEBUG oslo.service.loopingcall [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 534.195850] env[61852]: DEBUG nova.compute.manager [-] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 534.196051] env[61852]: DEBUG nova.network.neutron [-] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 534.223790] env[61852]: DEBUG nova.network.neutron [-] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 534.258136] env[61852]: DEBUG oslo_concurrency.lockutils [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Releasing lock "refresh_cache-b9f22589-1803-4688-bad1-8be6965d6c92" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 534.258390] env[61852]: DEBUG nova.compute.manager [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 534.258840] env[61852]: DEBUG nova.compute.manager [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 534.258840] env[61852]: DEBUG nova.network.neutron [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 534.290709] env[61852]: ERROR nova.compute.manager [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e413caaf-18fa-4232-9c03-9dae4bc25546, please check neutron logs for more information. [ 534.290709] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 534.290709] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 534.290709] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 534.290709] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 534.290709] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 534.290709] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 534.290709] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 534.290709] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 534.290709] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 534.290709] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 534.290709] env[61852]: ERROR nova.compute.manager raise self.value [ 534.290709] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 534.290709] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 534.290709] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 534.290709] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 534.291153] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 534.291153] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 534.291153] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e413caaf-18fa-4232-9c03-9dae4bc25546, please check neutron logs for more information. 
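A pattern worth noting in these failures: each one is logged twice, first as a bare traceback from the allocation greenthread (ending in "Removing descriptor"), and again when driver.spawn() first iterates network_info. That is because _allocate_network_async runs in an eventlet greenthread and its result only blocks, and re-raises, at the model's _sync_wrapper/wait frames seen above. A toy illustration of that deferral, with invented names:

    import eventlet

    def allocate():
        # stand-in for _allocate_network_async hitting PortBindingFailed
        raise RuntimeError('binding failed')

    gt = eventlet.spawn(allocate)
    # ... the build carries on; nothing has raised yet ...
    try:
        gt.wait()  # first consumer of the result; the error re-raises here
    except RuntimeError as exc:
        print('surfaced at wait():', exc)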
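The "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" triplets threaded through this log come from oslo.concurrency's lockutils helpers, which time how long each caller waited for and then held a named lock. A minimal sketch of the pattern; the lock name mirrors the log, the body is hypothetical:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim():
        # while this holds the lock, competing claims log
        # 'Acquiring lock "compute_resources" ...' and block
        pass

    instance_claim()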
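And for the nova.virt.hardware records a little earlier ("Build topologies for 1 vcpu(s) 1:1:1" through "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]"): with no flavor or image limits set, Nova enumerates the sockets/cores/threads combinations whose product matches the vCPU count, which for a single vCPU leaves only 1:1:1. A rough re-creation of that enumeration, simplified from the real hardware.py logic:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # yield (sockets, cores, threads) triples whose product equals
        # the requested vCPU count, within the (here unset) limits
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        yield (s, c, t)

    print(list(possible_topologies(1)))  # [(1, 1, 1)]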
[ 534.291153] env[61852]: ERROR nova.compute.manager [ 534.291153] env[61852]: Traceback (most recent call last): [ 534.291153] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 534.291153] env[61852]: listener.cb(fileno) [ 534.291153] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 534.291153] env[61852]: result = function(*args, **kwargs) [ 534.291153] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 534.291153] env[61852]: return func(*args, **kwargs) [ 534.291153] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 534.291153] env[61852]: raise e [ 534.291153] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 534.291153] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 534.291153] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 534.291153] env[61852]: created_port_ids = self._update_ports_for_instance( [ 534.291153] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 534.291153] env[61852]: with excutils.save_and_reraise_exception(): [ 534.291153] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 534.291153] env[61852]: self.force_reraise() [ 534.291153] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 534.291153] env[61852]: raise self.value [ 534.291153] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 534.291153] env[61852]: updated_port = self._update_port( [ 534.291153] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 534.291153] env[61852]: _ensure_no_port_binding_failure(port) [ 534.291153] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 534.291153] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 534.292009] env[61852]: nova.exception.PortBindingFailed: Binding failed for port e413caaf-18fa-4232-9c03-9dae4bc25546, please check neutron logs for more information. [ 534.292009] env[61852]: Removing descriptor: 16 [ 534.292009] env[61852]: ERROR nova.compute.manager [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e413caaf-18fa-4232-9c03-9dae4bc25546, please check neutron logs for more information. 
[ 534.292009] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Traceback (most recent call last): [ 534.292009] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 534.292009] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] yield resources [ 534.292009] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 534.292009] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] self.driver.spawn(context, instance, image_meta, [ 534.292009] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 534.292009] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 534.292009] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 534.292009] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] vm_ref = self.build_virtual_machine(instance, [ 534.292312] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 534.292312] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] vif_infos = vmwarevif.get_vif_info(self._session, [ 534.292312] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 534.292312] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] for vif in network_info: [ 534.292312] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 534.292312] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] return self._sync_wrapper(fn, *args, **kwargs) [ 534.292312] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 534.292312] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] self.wait() [ 534.292312] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 534.292312] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] self[:] = self._gt.wait() [ 534.292312] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 534.292312] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] return self._exit_event.wait() [ 534.292312] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 534.292631] env[61852]: ERROR 
nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] result = hub.switch() [ 534.292631] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 534.292631] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] return self.greenlet.switch() [ 534.292631] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 534.292631] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] result = function(*args, **kwargs) [ 534.292631] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 534.292631] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] return func(*args, **kwargs) [ 534.292631] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 534.292631] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] raise e [ 534.292631] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 534.292631] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] nwinfo = self.network_api.allocate_for_instance( [ 534.292631] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 534.292631] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] created_port_ids = self._update_ports_for_instance( [ 534.293048] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 534.293048] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] with excutils.save_and_reraise_exception(): [ 534.293048] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 534.293048] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] self.force_reraise() [ 534.293048] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 534.293048] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] raise self.value [ 534.293048] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 534.293048] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] updated_port = self._update_port( [ 534.293048] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 534.293048] 
env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] _ensure_no_port_binding_failure(port) [ 534.293048] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 534.293048] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] raise exception.PortBindingFailed(port_id=port['id']) [ 534.293373] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] nova.exception.PortBindingFailed: Binding failed for port e413caaf-18fa-4232-9c03-9dae4bc25546, please check neutron logs for more information. [ 534.293373] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] [ 534.293373] env[61852]: INFO nova.compute.manager [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Terminating instance [ 534.296530] env[61852]: DEBUG nova.network.neutron [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 534.300615] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Acquiring lock "refresh_cache-e377c443-91b5-4d99-a0e8-a9731421a39e" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 534.300615] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Acquired lock "refresh_cache-e377c443-91b5-4d99-a0e8-a9731421a39e" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 534.300615] env[61852]: DEBUG nova.network.neutron [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 534.392980] env[61852]: DEBUG oslo_concurrency.lockutils [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Acquiring lock "4ce41dca-63c6-447d-9c0a-00f9966e0093" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 534.392980] env[61852]: DEBUG oslo_concurrency.lockutils [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Lock "4ce41dca-63c6-447d-9c0a-00f9966e0093" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 534.574082] env[61852]: DEBUG nova.scheduler.client.report [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 534.690371] env[61852]: DEBUG nova.network.neutron [req-cfd7cf3c-e196-4e2a-9795-351297ab8fc0 req-96509dd5-6faa-428b-a98e-4ea584b39fcd service nova] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 534.728961] env[61852]: DEBUG nova.network.neutron [-] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 534.750220] env[61852]: DEBUG nova.network.neutron [req-cfd7cf3c-e196-4e2a-9795-351297ab8fc0 req-96509dd5-6faa-428b-a98e-4ea584b39fcd service nova] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 534.799772] env[61852]: DEBUG nova.network.neutron [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 534.829013] env[61852]: DEBUG nova.network.neutron [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 534.901679] env[61852]: DEBUG nova.network.neutron [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 535.024637] env[61852]: DEBUG nova.compute.manager [req-8949e6c5-fbbc-48cd-8668-1a999f4cd98e req-1d52800b-280c-4f1a-bdcd-2abb1b508245 service nova] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Received event network-vif-deleted-50a2ab09-4218-41fd-8374-f1615bf5386a {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 535.082515] env[61852]: DEBUG oslo_concurrency.lockutils [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.352s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 535.084311] env[61852]: DEBUG nova.compute.manager [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 535.087834] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 5.658s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.234430] env[61852]: INFO nova.compute.manager [-] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Took 1.04 seconds to deallocate network for instance. 
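The "Acquiring lock" / "acquired ... waited" / "released ... held" triples threaded through these records come from oslo_concurrency.lockutils, which serializes each critical section and logs wait and hold times around it. A minimal sketch of the same pattern around a resource-tracker-style operation (the function body is illustrative, not the nova code):

    from oslo_concurrency import lockutils

    # lockutils emits the Acquiring/acquired/released DEBUG lines, with
    # waited/held timings, around every call it serializes.
    @lockutils.synchronized('compute_resources')
    def abort_instance_claim(instance_uuid):
        # illustrative body; the real tracker rolls back CPU/RAM/disk usage
        print('rolling back claim for %s' % instance_uuid)

    abort_instance_claim('62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc')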
[ 535.237280] env[61852]: DEBUG nova.compute.claims [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 535.237709] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.257570] env[61852]: DEBUG oslo_concurrency.lockutils [req-cfd7cf3c-e196-4e2a-9795-351297ab8fc0 req-96509dd5-6faa-428b-a98e-4ea584b39fcd service nova] Releasing lock "refresh_cache-62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 535.308682] env[61852]: INFO nova.compute.manager [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] Took 1.05 seconds to deallocate network for instance. [ 535.406273] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Releasing lock "refresh_cache-e377c443-91b5-4d99-a0e8-a9731421a39e" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 535.406331] env[61852]: DEBUG nova.compute.manager [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 535.406524] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 535.406822] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c2a6d9d2-3da2-4154-a64c-f600d6d3ba4f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.425052] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2c787b3-327e-47bc-8dc9-001aff460fed {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.454276] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e377c443-91b5-4d99-a0e8-a9731421a39e could not be found. 
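The WARNING above is the vmwareapi driver discovering, via SearchIndex.FindAllByUuid, that the VM is already gone from vCenter and treating the destroy as a no-op rather than an error, so network and volume cleanup can still proceed. A sketch of that idempotent-destroy pattern; the helper names are stand-ins, not the nova functions:

    class InstanceNotFound(Exception):
        pass

    def destroy_on_hypervisor(instance_uuid, lookup_vm_ref, destroy_vm):
        # Mirror of the tolerance visible above: a missing backend VM is
        # logged and swallowed instead of failing the teardown.
        try:
            destroy_vm(lookup_vm_ref(instance_uuid))
        except InstanceNotFound:
            print('Instance does not exist on backend: %s' % instance_uuid)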
[ 535.454276] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 535.454276] env[61852]: INFO nova.compute.manager [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Took 0.05 seconds to destroy the instance on the hypervisor. [ 535.454276] env[61852]: DEBUG oslo.service.loopingcall [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 535.454276] env[61852]: DEBUG nova.compute.manager [-] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 535.454276] env[61852]: DEBUG nova.network.neutron [-] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 535.504398] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Acquiring lock "e9a7c08d-e021-43d0-b757-6ad0174b4648" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.504578] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Lock "e9a7c08d-e021-43d0-b757-6ad0174b4648" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.538606] env[61852]: DEBUG oslo_concurrency.lockutils [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Acquiring lock "b5f994d9-e0aa-4335-8339-df76a1a032ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.539893] env[61852]: DEBUG oslo_concurrency.lockutils [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Lock "b5f994d9-e0aa-4335-8339-df76a1a032ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.567699] env[61852]: DEBUG nova.network.neutron 
[-] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 535.594454] env[61852]: DEBUG nova.compute.utils [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 535.598160] env[61852]: DEBUG nova.compute.manager [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Not allocating networking since 'none' was specified. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 535.883080] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ec43ab7-d111-4ee6-a2d5-07888e457a02 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.892451] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93ff05e6-738c-4b3c-b14f-004cb1ffb602 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.925578] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-561bbc82-136d-4461-9276-ae97dacfe6fe {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.935540] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08ee4eed-1a0b-450c-b2a9-80db993bb3d5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.949859] env[61852]: DEBUG nova.compute.provider_tree [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 536.072595] env[61852]: DEBUG nova.network.neutron [-] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 536.104289] env[61852]: DEBUG nova.compute.manager [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 536.351994] env[61852]: INFO nova.scheduler.client.report [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Deleted allocations for instance b9f22589-1803-4688-bad1-8be6965d6c92 [ 536.453419] env[61852]: DEBUG nova.scheduler.client.report [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 536.576476] env[61852]: INFO nova.compute.manager [-] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Took 1.12 seconds to deallocate network for instance. [ 536.579375] env[61852]: DEBUG nova.compute.claims [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 536.579524] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 536.808989] env[61852]: ERROR nova.compute.manager [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port de358c72-6a90-452b-8bba-7f78a88b4b17, please check neutron logs for more information. 
[ 536.808989] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 536.808989] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 536.808989] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 536.808989] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 536.808989] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 536.808989] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 536.808989] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 536.808989] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 536.808989] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 536.808989] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 536.808989] env[61852]: ERROR nova.compute.manager raise self.value [ 536.808989] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 536.808989] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 536.808989] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 536.808989] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 536.809511] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 536.809511] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 536.809511] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port de358c72-6a90-452b-8bba-7f78a88b4b17, please check neutron logs for more information. 
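Each of these tracebacks passes through excutils.save_and_reraise_exception() and force_reraise(): the context manager lets _update_ports_for_instance run cleanup while preserving the original exception, which is why "raise self.value" appears in every trace. A small sketch of that idiom, with hypothetical update/cleanup callables:

    from oslo_utils import excutils

    def update_ports_for_instance(ports, update_port, cleanup):
        created = []
        for port in ports:
            try:
                created.append(update_port(port))
            except Exception:
                with excutils.save_and_reraise_exception():
                    # runs before the saved exception is re-raised via
                    # force_reraise(), matching the frames in the traceback
                    cleanup(created)
        return created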
[ 536.809511] env[61852]: ERROR nova.compute.manager [ 536.809511] env[61852]: Traceback (most recent call last): [ 536.809511] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 536.809511] env[61852]: listener.cb(fileno) [ 536.809511] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 536.809511] env[61852]: result = function(*args, **kwargs) [ 536.809511] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 536.809511] env[61852]: return func(*args, **kwargs) [ 536.809511] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 536.809511] env[61852]: raise e [ 536.809511] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 536.809511] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 536.809511] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 536.809511] env[61852]: created_port_ids = self._update_ports_for_instance( [ 536.809511] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 536.809511] env[61852]: with excutils.save_and_reraise_exception(): [ 536.809511] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 536.809511] env[61852]: self.force_reraise() [ 536.809511] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 536.809511] env[61852]: raise self.value [ 536.809511] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 536.809511] env[61852]: updated_port = self._update_port( [ 536.809511] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 536.809511] env[61852]: _ensure_no_port_binding_failure(port) [ 536.809511] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 536.809511] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 536.810271] env[61852]: nova.exception.PortBindingFailed: Binding failed for port de358c72-6a90-452b-8bba-7f78a88b4b17, please check neutron logs for more information. [ 536.810271] env[61852]: Removing descriptor: 18 [ 536.810271] env[61852]: ERROR nova.compute.manager [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port de358c72-6a90-452b-8bba-7f78a88b4b17, please check neutron logs for more information. 
[ 536.810271] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Traceback (most recent call last): [ 536.810271] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 536.810271] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] yield resources [ 536.810271] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 536.810271] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] self.driver.spawn(context, instance, image_meta, [ 536.810271] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 536.810271] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 536.810271] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 536.810271] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] vm_ref = self.build_virtual_machine(instance, [ 536.810633] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 536.810633] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] vif_infos = vmwarevif.get_vif_info(self._session, [ 536.810633] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 536.810633] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] for vif in network_info: [ 536.810633] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 536.810633] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] return self._sync_wrapper(fn, *args, **kwargs) [ 536.810633] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 536.810633] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] self.wait() [ 536.810633] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 536.810633] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] self[:] = self._gt.wait() [ 536.810633] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 536.810633] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] return self._exit_event.wait() [ 536.810633] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 536.810964] env[61852]: ERROR 
nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] result = hub.switch() [ 536.810964] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 536.810964] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] return self.greenlet.switch() [ 536.810964] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 536.810964] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] result = function(*args, **kwargs) [ 536.810964] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 536.810964] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] return func(*args, **kwargs) [ 536.810964] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 536.810964] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] raise e [ 536.810964] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 536.810964] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] nwinfo = self.network_api.allocate_for_instance( [ 536.810964] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 536.810964] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] created_port_ids = self._update_ports_for_instance( [ 536.811315] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 536.811315] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] with excutils.save_and_reraise_exception(): [ 536.811315] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 536.811315] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] self.force_reraise() [ 536.811315] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 536.811315] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] raise self.value [ 536.811315] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 536.811315] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] updated_port = self._update_port( [ 536.811315] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 536.811315] 
env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] _ensure_no_port_binding_failure(port) [ 536.811315] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 536.811315] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] raise exception.PortBindingFailed(port_id=port['id']) [ 536.811655] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] nova.exception.PortBindingFailed: Binding failed for port de358c72-6a90-452b-8bba-7f78a88b4b17, please check neutron logs for more information. [ 536.811655] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] [ 536.811655] env[61852]: INFO nova.compute.manager [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Terminating instance [ 536.813310] env[61852]: DEBUG oslo_concurrency.lockutils [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Acquiring lock "refresh_cache-8084d5e2-454b-4003-a9e8-b733fd0322a3" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 536.815042] env[61852]: DEBUG oslo_concurrency.lockutils [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Acquired lock "refresh_cache-8084d5e2-454b-4003-a9e8-b733fd0322a3" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 536.815042] env[61852]: DEBUG nova.network.neutron [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 536.837970] env[61852]: DEBUG nova.compute.manager [req-e590a802-5d7a-43c4-b595-efb9367d0aa7 req-c4308044-d73c-4770-bd44-de123de53882 service nova] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Received event network-vif-deleted-9d81a11d-ffc6-477b-8822-d2ca7437b877 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 536.838331] env[61852]: DEBUG nova.compute.manager [req-e590a802-5d7a-43c4-b595-efb9367d0aa7 req-c4308044-d73c-4770-bd44-de123de53882 service nova] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Received event network-changed-e413caaf-18fa-4232-9c03-9dae4bc25546 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 536.839465] env[61852]: DEBUG nova.compute.manager [req-e590a802-5d7a-43c4-b595-efb9367d0aa7 req-c4308044-d73c-4770-bd44-de123de53882 service nova] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Refreshing instance network info cache due to event network-changed-e413caaf-18fa-4232-9c03-9dae4bc25546. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 536.839465] env[61852]: DEBUG oslo_concurrency.lockutils [req-e590a802-5d7a-43c4-b595-efb9367d0aa7 req-c4308044-d73c-4770-bd44-de123de53882 service nova] Acquiring lock "refresh_cache-e377c443-91b5-4d99-a0e8-a9731421a39e" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 536.839465] env[61852]: DEBUG oslo_concurrency.lockutils [req-e590a802-5d7a-43c4-b595-efb9367d0aa7 req-c4308044-d73c-4770-bd44-de123de53882 service nova] Acquired lock "refresh_cache-e377c443-91b5-4d99-a0e8-a9731421a39e" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 536.839465] env[61852]: DEBUG nova.network.neutron [req-e590a802-5d7a-43c4-b595-efb9367d0aa7 req-c4308044-d73c-4770-bd44-de123de53882 service nova] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Refreshing network info cache for port e413caaf-18fa-4232-9c03-9dae4bc25546 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 536.864727] env[61852]: DEBUG oslo_concurrency.lockutils [None req-76a7eb5e-ba2a-4a69-8953-32b7e9c02507 tempest-FloatingIPsAssociationTestJSON-967172369 tempest-FloatingIPsAssociationTestJSON-967172369-project-member] Lock "b9f22589-1803-4688-bad1-8be6965d6c92" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.689s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 536.866289] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "b9f22589-1803-4688-bad1-8be6965d6c92" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 7.633s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 536.866473] env[61852]: INFO nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: b9f22589-1803-4688-bad1-8be6965d6c92] During sync_power_state the instance has a pending task (spawning). Skip. [ 536.868165] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "b9f22589-1803-4688-bad1-8be6965d6c92" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 536.960837] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.873s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 536.962264] env[61852]: ERROR nova.compute.manager [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 255a96e4-ee07-4e40-97c0-76906b0e4e6d, please check neutron logs for more information. 
[ 536.962264] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Traceback (most recent call last): [ 536.962264] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 536.962264] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] self.driver.spawn(context, instance, image_meta, [ 536.962264] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 536.962264] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 536.962264] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 536.962264] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] vm_ref = self.build_virtual_machine(instance, [ 536.962264] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 536.962264] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] vif_infos = vmwarevif.get_vif_info(self._session, [ 536.962264] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 536.962578] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] for vif in network_info: [ 536.962578] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 536.962578] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] return self._sync_wrapper(fn, *args, **kwargs) [ 536.962578] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 536.962578] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] self.wait() [ 536.962578] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 536.962578] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] self[:] = self._gt.wait() [ 536.962578] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 536.962578] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] return self._exit_event.wait() [ 536.962578] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 536.962578] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] result = hub.switch() [ 536.962578] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
536.962578] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] return self.greenlet.switch() [ 536.962923] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 536.962923] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] result = function(*args, **kwargs) [ 536.962923] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 536.962923] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] return func(*args, **kwargs) [ 536.962923] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 536.962923] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] raise e [ 536.962923] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 536.962923] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] nwinfo = self.network_api.allocate_for_instance( [ 536.962923] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 536.962923] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] created_port_ids = self._update_ports_for_instance( [ 536.962923] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 536.962923] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] with excutils.save_and_reraise_exception(): [ 536.962923] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 536.963302] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] self.force_reraise() [ 536.963302] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 536.963302] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] raise self.value [ 536.963302] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 536.963302] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] updated_port = self._update_port( [ 536.963302] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 536.963302] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] _ensure_no_port_binding_failure(port) [ 536.963302] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 536.963302] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] raise exception.PortBindingFailed(port_id=port['id']) [ 536.963302] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] nova.exception.PortBindingFailed: Binding failed for port 255a96e4-ee07-4e40-97c0-76906b0e4e6d, please check neutron logs for more information. [ 536.963302] env[61852]: ERROR nova.compute.manager [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] [ 536.963721] env[61852]: DEBUG nova.compute.utils [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Binding failed for port 255a96e4-ee07-4e40-97c0-76906b0e4e6d, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 536.964558] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 7.226s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 536.964696] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 536.964841] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61852) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 536.965150] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.034s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 536.970065] env[61852]: INFO nova.compute.claims [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 536.974743] env[61852]: DEBUG nova.compute.manager [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Build of instance b31d126f-5b63-434c-a2c3-c7dc2f40c80f was re-scheduled: Binding failed for port 255a96e4-ee07-4e40-97c0-76906b0e4e6d, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 536.975384] env[61852]: DEBUG nova.compute.manager [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 536.975717] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Acquiring lock "refresh_cache-b31d126f-5b63-434c-a2c3-c7dc2f40c80f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 536.976066] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Acquired lock "refresh_cache-b31d126f-5b63-434c-a2c3-c7dc2f40c80f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 536.976323] env[61852]: DEBUG nova.network.neutron [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 536.979494] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8d79a76-74f4-4134-afbc-4f63f712a62d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.991268] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc88a1b-aa91-4189-9dd6-8fd50674aae3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.013107] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b973523-f09d-4655-86d2-940fc85f4ee9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.019237] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6426f3f-1cab-41c5-a78a-ff29c465a024 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.059037] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181548MB free_disk=139GB free_vcpus=48 pci_devices=None {{(pid=61852) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 537.059037] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 537.119840] env[61852]: DEBUG nova.compute.manager [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] 
[instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 537.155697] env[61852]: DEBUG nova.virt.hardware [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 537.155697] env[61852]: DEBUG nova.virt.hardware [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 537.155697] env[61852]: DEBUG nova.virt.hardware [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 537.155697] env[61852]: DEBUG nova.virt.hardware [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 537.157394] env[61852]: DEBUG nova.virt.hardware [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 537.157394] env[61852]: DEBUG nova.virt.hardware [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 537.160911] env[61852]: DEBUG nova.virt.hardware [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 537.161098] env[61852]: DEBUG nova.virt.hardware [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
537.161272] env[61852]: DEBUG nova.virt.hardware [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 537.161435] env[61852]: DEBUG nova.virt.hardware [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 537.161620] env[61852]: DEBUG nova.virt.hardware [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 537.162516] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b552618-9a89-48ec-893f-d24f57597131 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.171845] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e5c60bc-848a-4691-b365-10509b80ff91 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.196763] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Instance VIF info [] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 537.210873] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 537.211637] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3a2e0116-1feb-4615-bbd4-400bfbc2a82d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.226022] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Created folder: OpenStack in parent group-v4.
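The topology walk logged above (limits 65536:65536:65536, no flavor or image preferences, 1 vCPU) can be re-derived with a toy enumeration; this is illustrative only, not Nova's actual implementation:

    # Every factorization of sockets*cores*threads == vcpus within the limits
    # is a candidate topology; for 1 vCPU that leaves exactly (1, 1, 1).
    import itertools

    def possible_topologies(vcpus, limit=65536):
        rng = range(1, min(vcpus, limit) + 1)
        return [(s, c, t) for s, c, t in itertools.product(rng, rng, rng)
                if s * c * t == vcpus]

    print(possible_topologies(1))  # [(1, 1, 1)] -- matches "Got 1 possible topologies"

[ 537.226022] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Creating folder: Project (0810d0373087491d81280e3bd0eb3209). Parent ref: group-v277280.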
{{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 537.226022] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ff61d7ac-8a98-42ea-8de1-0bbbf34ec5e3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.235389] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Created folder: Project (0810d0373087491d81280e3bd0eb3209) in parent group-v277280. [ 537.235389] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Creating folder: Instances. Parent ref: group-v277281. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 537.237363] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7e169740-7729-4c62-bf11-34344c5417a4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.247038] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Created folder: Instances in parent group-v277281. [ 537.247339] env[61852]: DEBUG oslo.service.loopingcall [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 537.247489] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 537.247689] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-872f83ee-7bce-4a95-87c3-966dd36e2609 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.266201] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 537.266201] env[61852]: value = "task-1292652" [ 537.266201] env[61852]: _type = "Task" [ 537.266201] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 537.276172] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292652, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 537.365699] env[61852]: DEBUG nova.network.neutron [req-e590a802-5d7a-43c4-b595-efb9367d0aa7 req-c4308044-d73c-4770-bd44-de123de53882 service nova] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 537.368052] env[61852]: DEBUG nova.compute.manager [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 537.386712] env[61852]: DEBUG nova.network.neutron [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 537.491721] env[61852]: DEBUG nova.network.neutron [req-e590a802-5d7a-43c4-b595-efb9367d0aa7 req-c4308044-d73c-4770-bd44-de123de53882 service nova] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 537.512980] env[61852]: DEBUG nova.network.neutron [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 537.572158] env[61852]: DEBUG nova.network.neutron [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 537.639767] env[61852]: DEBUG nova.network.neutron [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 537.777438] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292652, 'name': CreateVM_Task, 'duration_secs': 0.294344} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 537.777810] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 537.779596] env[61852]: DEBUG oslo_vmware.service [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee720a2f-1603-4c77-a953-d0887bb24fa6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.789150] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Acquiring lock "db41ed39-0fef-48ea-9197-8d3d8844547a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 537.789392] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Lock "db41ed39-0fef-48ea-9197-8d3d8844547a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 537.792570] env[61852]: DEBUG oslo_concurrency.lockutils [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 537.792570] env[61852]: DEBUG oslo_concurrency.lockutils [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 537.793000] env[61852]: DEBUG oslo_concurrency.lockutils [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 537.793424] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16237d07-e393-40de-af44-cda302d08802 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
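The "Waiting for the task" / "progress is 0%" records in this log come from oslo.vmware's task polling. A minimal sketch of that pattern; the host, credentials and managed-object references below are placeholders, not values from this log:

    from oslo_vmware import api

    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    folder_ref = config_spec = respool_ref = ...  # placeholder refs/spec

    task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                              config=config_spec, pool=respool_ref)
    session.wait_for_task(task)  # loops _poll_task until SUCCESS, raises on error

[ 537.798652] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Waiting for the task: (returnval){ [ 537.798652] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52b972dc-e318-29fb-fbae-543117583c1c" [ 537.798652] env[61852]: _type = "Task" [ 537.798652]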
env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 537.808646] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52b972dc-e318-29fb-fbae-543117583c1c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 537.908938] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 537.989390] env[61852]: DEBUG nova.compute.manager [req-0b3fcb28-67f8-4693-91d8-d4ad03227bcf req-c42e4051-052e-45fe-9b2a-635aa46148dd service nova] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Received event network-changed-de358c72-6a90-452b-8bba-7f78a88b4b17 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 537.989390] env[61852]: DEBUG nova.compute.manager [req-0b3fcb28-67f8-4693-91d8-d4ad03227bcf req-c42e4051-052e-45fe-9b2a-635aa46148dd service nova] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Refreshing instance network info cache due to event network-changed-de358c72-6a90-452b-8bba-7f78a88b4b17. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 537.989390] env[61852]: DEBUG oslo_concurrency.lockutils [req-0b3fcb28-67f8-4693-91d8-d4ad03227bcf req-c42e4051-052e-45fe-9b2a-635aa46148dd service nova] Acquiring lock "refresh_cache-8084d5e2-454b-4003-a9e8-b733fd0322a3" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 537.995141] env[61852]: DEBUG oslo_concurrency.lockutils [req-e590a802-5d7a-43c4-b595-efb9367d0aa7 req-c4308044-d73c-4770-bd44-de123de53882 service nova] Releasing lock "refresh_cache-e377c443-91b5-4d99-a0e8-a9731421a39e" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 537.995368] env[61852]: DEBUG nova.compute.manager [req-e590a802-5d7a-43c4-b595-efb9367d0aa7 req-c4308044-d73c-4770-bd44-de123de53882 service nova] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Received event network-vif-deleted-e413caaf-18fa-4232-9c03-9dae4bc25546 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 538.075127] env[61852]: DEBUG oslo_concurrency.lockutils [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Releasing lock "refresh_cache-8084d5e2-454b-4003-a9e8-b733fd0322a3" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 538.075544] env[61852]: DEBUG nova.compute.manager [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 538.075737] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 538.076041] env[61852]: DEBUG oslo_concurrency.lockutils [req-0b3fcb28-67f8-4693-91d8-d4ad03227bcf req-c42e4051-052e-45fe-9b2a-635aa46148dd service nova] Acquired lock "refresh_cache-8084d5e2-454b-4003-a9e8-b733fd0322a3" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 538.076214] env[61852]: DEBUG nova.network.neutron [req-0b3fcb28-67f8-4693-91d8-d4ad03227bcf req-c42e4051-052e-45fe-9b2a-635aa46148dd service nova] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Refreshing network info cache for port de358c72-6a90-452b-8bba-7f78a88b4b17 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 538.077252] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5a77082a-0ee5-4206-925d-4a2ec46c08f4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.088373] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c566d5ec-bb0a-4374-9a73-1fd4672417fb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.117258] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8084d5e2-454b-4003-a9e8-b733fd0322a3 could not be found. [ 538.117502] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 538.117678] env[61852]: INFO nova.compute.manager [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Took 0.04 seconds to destroy the instance on the hypervisor.
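The loopingcall record that follows is oslo.service's retry driver wrapping the network deallocation. A hedged, simplified analogue of that pattern (Nova's real helper differs in details; FixedIntervalLoopingCall and the stub names below are used here only for brevity):

    from oslo_service import loopingcall

    class TransientError(Exception):      # placeholder failure type
        pass

    def deallocate_network():             # placeholder for the real call
        pass

    def _deallocate_with_retries():
        try:
            deallocate_network()
        except TransientError:
            return                        # swallow and retry on the next tick
        raise loopingcall.LoopingCallDone()  # success: stop the loop

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    timer.start(interval=2).wait()        # blocks until LoopingCallDone

[ 538.117916] env[61852]: DEBUG oslo.service.loopingcall [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.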
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 538.120905] env[61852]: DEBUG nova.compute.manager [-] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 538.120905] env[61852]: DEBUG nova.network.neutron [-] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 538.142095] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Releasing lock "refresh_cache-b31d126f-5b63-434c-a2c3-c7dc2f40c80f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 538.142328] env[61852]: DEBUG nova.compute.manager [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 538.142493] env[61852]: DEBUG nova.compute.manager [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 538.142656] env[61852]: DEBUG nova.network.neutron [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 538.155545] env[61852]: DEBUG nova.network.neutron [-] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 538.164482] env[61852]: DEBUG nova.network.neutron [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Instance cache missing network info.
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 538.254995] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93eccb62-78b0-4d87-aed6-583a54c1e931 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.263766] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d89608f-ee5f-434b-802d-4bb0a42f51e3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.307850] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f48934d9-584f-4e2d-9773-f084edf82dcd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.318842] env[61852]: DEBUG oslo_concurrency.lockutils [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 538.319147] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 538.319829] env[61852]: DEBUG oslo_concurrency.lockutils [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 538.319829] env[61852]: DEBUG oslo_concurrency.lockutils [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 538.319942] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 538.320607] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3f383799-0080-41f5-b0d4-d52c3fac5f8f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.326431] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea1c033c-785d-40fb-a2da-d9b1cf6cdb77 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.341534] env[61852]: DEBUG nova.compute.provider_tree [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 
tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 538.344817] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 538.344817] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 538.345581] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8e6d880-2a12-4eea-96e2-079eb73761fe {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.352645] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f003b64-c15a-4d88-a789-458ef73170aa {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.358534] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Waiting for the task: (returnval){ [ 538.358534] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52172baa-a928-956d-cc29-a2c27affa822" [ 538.358534] env[61852]: _type = "Task" [ 538.358534] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 538.366293] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52172baa-a928-956d-cc29-a2c27affa822, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 538.597213] env[61852]: DEBUG nova.network.neutron [req-0b3fcb28-67f8-4693-91d8-d4ad03227bcf req-c42e4051-052e-45fe-9b2a-635aa46148dd service nova] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 538.661044] env[61852]: DEBUG nova.network.neutron [-] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 538.671978] env[61852]: DEBUG nova.network.neutron [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 538.675896] env[61852]: DEBUG nova.network.neutron [req-0b3fcb28-67f8-4693-91d8-d4ad03227bcf req-c42e4051-052e-45fe-9b2a-635aa46148dd service nova] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 538.844823] env[61852]: DEBUG nova.scheduler.client.report [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 538.870393] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Preparing fetch location {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 538.871119] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Creating directory with path [datastore1] vmware_temp/40f4dc1b-c1f2-4e71-a6ca-36cdb5a65082/90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 538.871576] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1b1fcff4-0561-42b7-b01e-5d9f51f97fdf {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.894022] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Created directory with path [datastore1] vmware_temp/40f4dc1b-c1f2-4e71-a6ca-36cdb5a65082/90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 538.894022] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Fetch image to [datastore1] 
vmware_temp/40f4dc1b-c1f2-4e71-a6ca-36cdb5a65082/90fd8f39-16b3-43e0-a682-0ec131005e31/tmp-sparse.vmdk {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 538.894022] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Downloading image file data 90fd8f39-16b3-43e0-a682-0ec131005e31 to [datastore1] vmware_temp/40f4dc1b-c1f2-4e71-a6ca-36cdb5a65082/90fd8f39-16b3-43e0-a682-0ec131005e31/tmp-sparse.vmdk on the data store datastore1 {{(pid=61852) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 538.894879] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e9a1bf4-739b-4f87-830f-6b943c633768 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.910216] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0864ebe-5151-49a3-8534-c84522df2524 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.927732] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a74682ec-2377-4a80-98c0-f6974f5117c2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.966809] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56184a7d-d8fd-422a-b27b-4be53705168b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.975195] env[61852]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8b5dd99a-7b21-49f5-96fd-4d94d487a2e0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.998951] env[61852]: DEBUG nova.virt.vmwareapi.images [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Downloading image file data 90fd8f39-16b3-43e0-a682-0ec131005e31 to the data store datastore1 {{(pid=61852) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 539.066030] env[61852]: DEBUG oslo_vmware.rw_handles [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/40f4dc1b-c1f2-4e71-a6ca-36cdb5a65082/90fd8f39-16b3-43e0-a682-0ec131005e31/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61852) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 539.164577] env[61852]: INFO nova.compute.manager [-] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Took 1.04 seconds to deallocate network for instance. 
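The "Creating HTTP connection to write to file" record above is oslo.vmware's rw_handles streaming the image bytes straight onto the datastore over HTTPS. A sketch under assumed placeholder values (host, cookies, path and iterator are made up; the size 21318656 is the one logged):

    from oslo_vmware import rw_handles

    cookies = ...       # vCenter session cookies (placeholder)
    image_iter = []     # e.g. the Glance image-data iterator (placeholder)

    handle = rw_handles.FileWriteHandle(
        'esx.example.test', 443, 'ha-datacenter', 'datastore1', cookies,
        'vmware_temp/tmp-dir/image-id/tmp-sparse.vmdk', 21318656)
    for chunk in image_iter:
        handle.write(chunk)
    handle.close()      # produces the "Closing write handle for ..." record seen below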
[ 539.170199] env[61852]: DEBUG nova.compute.claims [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 539.170435] env[61852]: DEBUG oslo_concurrency.lockutils [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 539.176888] env[61852]: INFO nova.compute.manager [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] Took 1.03 seconds to deallocate network for instance. [ 539.187361] env[61852]: DEBUG oslo_concurrency.lockutils [req-0b3fcb28-67f8-4693-91d8-d4ad03227bcf req-c42e4051-052e-45fe-9b2a-635aa46148dd service nova] Releasing lock "refresh_cache-8084d5e2-454b-4003-a9e8-b733fd0322a3" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 539.187361] env[61852]: DEBUG nova.compute.manager [req-0b3fcb28-67f8-4693-91d8-d4ad03227bcf req-c42e4051-052e-45fe-9b2a-635aa46148dd service nova] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Received event network-vif-deleted-de358c72-6a90-452b-8bba-7f78a88b4b17 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 539.357626] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.391s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 539.357626] env[61852]: DEBUG nova.compute.manager [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 539.364473] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.556s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 539.366416] env[61852]: INFO nova.compute.claims [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 539.735366] env[61852]: DEBUG oslo_vmware.rw_handles [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Completed reading data from the image iterator. 
{{(pid=61852) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 539.735519] env[61852]: DEBUG oslo_vmware.rw_handles [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/40f4dc1b-c1f2-4e71-a6ca-36cdb5a65082/90fd8f39-16b3-43e0-a682-0ec131005e31/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61852) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 539.869800] env[61852]: DEBUG nova.virt.vmwareapi.images [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Downloaded image file data 90fd8f39-16b3-43e0-a682-0ec131005e31 to vmware_temp/40f4dc1b-c1f2-4e71-a6ca-36cdb5a65082/90fd8f39-16b3-43e0-a682-0ec131005e31/tmp-sparse.vmdk on the data store datastore1 {{(pid=61852) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 539.870898] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Caching image {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 539.871863] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Copying Virtual Disk [datastore1] vmware_temp/40f4dc1b-c1f2-4e71-a6ca-36cdb5a65082/90fd8f39-16b3-43e0-a682-0ec131005e31/tmp-sparse.vmdk to [datastore1] vmware_temp/40f4dc1b-c1f2-4e71-a6ca-36cdb5a65082/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 539.875276] env[61852]: DEBUG nova.compute.utils [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 539.878251] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-da507639-9165-48e1-a66e-28c60cad3ef8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.889297] env[61852]: DEBUG nova.compute.manager [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 539.889478] env[61852]: DEBUG nova.network.neutron [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 539.898524] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Waiting for the task: (returnval){ [ 539.898524] env[61852]: value = "task-1292653" [ 539.898524] env[61852]: _type = "Task" [ 539.898524] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 539.911645] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292653, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 539.984455] env[61852]: DEBUG nova.policy [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd032b9d26a72416885d080dc2922e815', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '01d1223a327f4442b01edf22f3fc578a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 540.216844] env[61852]: INFO nova.scheduler.client.report [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Deleted allocations for instance b31d126f-5b63-434c-a2c3-c7dc2f40c80f [ 540.350461] env[61852]: DEBUG nova.network.neutron [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Successfully created port: 9fd5ccc7-d886-4008-8822-f7e5ae8d217f {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 540.379435] env[61852]: DEBUG nova.compute.manager [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 540.417087] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Acquiring lock "2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.417710] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Lock "2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 540.417710] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292653, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 540.420628] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Acquiring lock "0f6293bd-3096-4deb-a388-9a3e8b2e5926" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.420878] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Lock "0f6293bd-3096-4deb-a388-9a3e8b2e5926" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 540.730702] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1f7cdaba-7f0d-45a3-8b54-6e82d2060b66 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Lock "b31d126f-5b63-434c-a2c3-c7dc2f40c80f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 23.755s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 540.731518] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43447b63-56d8-4297-adc4-a3979f9629d3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.738568] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "b31d126f-5b63-434c-a2c3-c7dc2f40c80f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 11.505s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
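The Acquiring/acquired/"released" triplets with waited/held timings throughout this log are emitted by oslo.concurrency's synchronized wrapper. A minimal analogue (the lock name mirrors the log; the function body is a stand-in):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim():
        # critical section: "waited N.NNNs" is time spent queued behind other
        # holders, "held N.NNNs" is time spent inside this function
        pass

    instance_claim()

[ 540.738813] env[61852]: INFO nova.compute.manager [None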
req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: b31d126f-5b63-434c-a2c3-c7dc2f40c80f] During sync_power_state the instance has a pending task (spawning). Skip. [ 540.739065] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "b31d126f-5b63-434c-a2c3-c7dc2f40c80f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 540.745010] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cbf7b72-167f-4b8e-b429-56e28bf73472 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.781012] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-541d8232-8800-42ca-ba22-2cc1d7e1a505 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.790723] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c816f878-1f5e-43d2-9444-5985ca000070 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.806138] env[61852]: DEBUG nova.compute.provider_tree [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 540.913080] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292653, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.673501} completed successfully.
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 540.913357] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Copied Virtual Disk [datastore1] vmware_temp/40f4dc1b-c1f2-4e71-a6ca-36cdb5a65082/90fd8f39-16b3-43e0-a682-0ec131005e31/tmp-sparse.vmdk to [datastore1] vmware_temp/40f4dc1b-c1f2-4e71-a6ca-36cdb5a65082/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 540.913528] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Deleting the datastore file [datastore1] vmware_temp/40f4dc1b-c1f2-4e71-a6ca-36cdb5a65082/90fd8f39-16b3-43e0-a682-0ec131005e31/tmp-sparse.vmdk {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 540.915999] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ab6c0a86-ab17-44ce-b4a7-f5d094b81980 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.924031] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Waiting for the task: (returnval){ [ 540.924031] env[61852]: value = "task-1292654" [ 540.924031] env[61852]: _type = "Task" [ 540.924031] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 540.935030] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292654, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 541.242179] env[61852]: DEBUG nova.compute.manager [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 541.311699] env[61852]: DEBUG nova.scheduler.client.report [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 541.389768] env[61852]: DEBUG nova.compute.manager [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 541.422872] env[61852]: DEBUG nova.virt.hardware [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 541.423047] env[61852]: DEBUG nova.virt.hardware [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 541.423214] env[61852]: DEBUG nova.virt.hardware [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 541.423403] env[61852]: DEBUG nova.virt.hardware [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 541.423551] env[61852]: DEBUG nova.virt.hardware [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
[ 541.389768] env[61852]: DEBUG nova.compute.manager [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 541.422872] env[61852]: DEBUG nova.virt.hardware [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 541.423047] env[61852]: DEBUG nova.virt.hardware [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 541.423214] env[61852]: DEBUG nova.virt.hardware [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 541.423403] env[61852]: DEBUG nova.virt.hardware [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 541.423551] env[61852]: DEBUG nova.virt.hardware [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 541.423698] env[61852]: DEBUG nova.virt.hardware [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 541.423904] env[61852]: DEBUG nova.virt.hardware [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 541.424103] env[61852]: DEBUG nova.virt.hardware [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 541.424300] env[61852]: DEBUG nova.virt.hardware [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 541.424442] env[61852]: DEBUG nova.virt.hardware [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 541.424608] env[61852]: DEBUG nova.virt.hardware [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
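
The topology walk above starts from unset flavor/image limits (logged as 0:0:0, meaning "no constraint", which falls back to the 65536 maxima), enumerates every sockets*cores*threads factorization of the vCPU count, and sorts the candidates by preference; with vcpus=1 the only candidate is 1:1:1. A rough sketch of the enumeration step, not Nova's exact code:

from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    # Yield (sockets, cores, threads) triples whose product is vcpus and
    # which respect the limits; mirrors "Build topologies for 1 vcpu(s)
    # 1:1:1" -> "Got 1 possible topologies" in the log above.
    for s, c, t in product(range(1, vcpus + 1), repeat=3):
        if (s * c * t == vcpus and s <= max_sockets
                and c <= max_cores and t <= max_threads):
            yield (s, c, t)

print(list(possible_topologies(1)))  # [(1, 1, 1)]
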
[ 541.425593] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aea48cf-5438-4e2e-a016-d387508df278 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 541.439490] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76fe5d94-ea11-4b22-b116-0fe5bbfc9a58 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 541.443543] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292654, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.024994} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 541.443831] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 541.443960] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Moving file from [datastore1] vmware_temp/40f4dc1b-c1f2-4e71-a6ca-36cdb5a65082/90fd8f39-16b3-43e0-a682-0ec131005e31 to [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31. {{(pid=61852) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}}
[ 541.444666] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-37fd13d4-2143-4e64-99f8-ed48a85d80da {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 541.462490] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Waiting for the task: (returnval){
[ 541.462490] env[61852]: value = "task-1292655"
[ 541.462490] env[61852]: _type = "Task"
[ 541.462490] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 541.472268] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292655, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 541.779084] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 541.818637] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.454s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 541.819787] env[61852]: DEBUG nova.compute.manager [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
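
The compute_resources acquire/release lines come from oslo.concurrency: the resource tracker serializes all claims on one named semaphore per host, so a claim that takes seconds (held 2.454s above, and a 9.992s wait just below) queues every other build on the node behind it. The pattern, reduced to its core:

from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def instance_claim():
    # Accounting happens while the semaphore is held; the log's
    # "held 2.454s" is time spent inside the decorated body, and
    # "waited 9.992s" is time spent queued behind other claims.
    pass

instance_claim()
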
[ 541.825525] env[61852]: DEBUG oslo_concurrency.lockutils [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.992s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 541.830049] env[61852]: INFO nova.compute.claims [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 541.977036] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292655, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.02443} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 541.977036] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] File moved {{(pid=61852) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}}
[ 541.977036] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Cleaning up location [datastore1] vmware_temp/40f4dc1b-c1f2-4e71-a6ca-36cdb5a65082 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}}
[ 541.977259] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Deleting the datastore file [datastore1] vmware_temp/40f4dc1b-c1f2-4e71-a6ca-36cdb5a65082 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 541.977666] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a7a9bf16-5c2f-41f1-b82a-2e54a5ee1fa6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 541.986559] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Waiting for the task: (returnval){
[ 541.986559] env[61852]: value = "task-1292656"
[ 541.986559] env[61852]: _type = "Task"
[ 541.986559] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 541.998430] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292656, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
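
"Claim successful on node domain-c8..." is the resource tracker reserving room for the instance before the build proceeds; if the build later fails (as happens below for f112b2be-... after its port binding error), the claim is aborted and the resources are returned. A toy model of that lifecycle, not Nova's actual classes:

class Tracker:
    def __init__(self):
        self.used = 0

class Claim:
    def __init__(self, tracker, amount):
        self.tracker, self.amount = tracker, amount
        tracker.used += amount            # "Claim successful on node ..."
    def abort(self):
        self.tracker.used -= self.amount  # "Aborting claim: ..."

tracker = Tracker()
claim = Claim(tracker, amount=1)
try:
    raise RuntimeError("PortBindingFailed")  # the spawn fails
except RuntimeError:
    claim.abort()                            # resources go back to the pool
print(tracker.used)  # 0
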
[ 542.337611] env[61852]: DEBUG nova.compute.utils [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}}
[ 542.339307] env[61852]: DEBUG nova.compute.manager [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 542.339438] env[61852]: DEBUG nova.network.neutron [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 542.479291] env[61852]: DEBUG nova.policy [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'be922c40dddf48c8ae436d0a244e7b6b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bdac3605118e44a69d44ab56cafe2e21', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}}
[ 542.496954] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292656, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.03155} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 542.497505] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 542.498049] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6bd1fd8-efeb-423b-99ab-b8acac76e942 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.503700] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Waiting for the task: (returnval){
[ 542.503700] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52b027d3-72db-9d66-997f-542493284b24"
[ 542.503700] env[61852]: _type = "Task"
[ 542.503700] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
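
The nova.policy line above is an oslo.policy check: the rule network:attach_external_network is evaluated against the request's credential dict, and a tempest project user with only reader/member roles fails it, so the port is created without external-network access. A sketch of the mechanism; the "role:admin" rule string here is illustrative, not the exact default:

from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(
    policy.RuleDefault("network:attach_external_network", "role:admin"))

creds = {"roles": ["reader", "member"],
         "project_id": "bdac3605118e44a69d44ab56cafe2e21"}
# Returns False rather than raising, because do_raise defaults to False.
print(enforcer.enforce("network:attach_external_network", {}, creds))
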
[ 542.517151] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52b027d3-72db-9d66-997f-542493284b24, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 542.535738] env[61852]: ERROR nova.compute.manager [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9fd5ccc7-d886-4008-8822-f7e5ae8d217f, please check neutron logs for more information.
[ 542.535738] env[61852]: ERROR nova.compute.manager Traceback (most recent call last):
[ 542.535738] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 542.535738] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance(
[ 542.535738] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 542.535738] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance(
[ 542.535738] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 542.535738] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception():
[ 542.535738] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 542.535738] env[61852]: ERROR nova.compute.manager self.force_reraise()
[ 542.535738] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 542.535738] env[61852]: ERROR nova.compute.manager raise self.value
[ 542.535738] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 542.535738] env[61852]: ERROR nova.compute.manager updated_port = self._update_port(
[ 542.535738] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 542.535738] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port)
[ 542.536237] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 542.536237] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id'])
[ 542.536237] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9fd5ccc7-d886-4008-8822-f7e5ae8d217f, please check neutron logs for more information.
[ 542.536237] env[61852]: ERROR nova.compute.manager
[ 542.536237] env[61852]: Traceback (most recent call last):
[ 542.536237] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait
[ 542.536237] env[61852]: listener.cb(fileno)
[ 542.536237] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 542.536237] env[61852]: result = function(*args, **kwargs)
[ 542.536237] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 542.536237] env[61852]: return func(*args, **kwargs)
[ 542.536237] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 542.536237] env[61852]: raise e
[ 542.536237] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 542.536237] env[61852]: nwinfo = self.network_api.allocate_for_instance(
[ 542.536237] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 542.536237] env[61852]: created_port_ids = self._update_ports_for_instance(
[ 542.536237] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 542.536237] env[61852]: with excutils.save_and_reraise_exception():
[ 542.536237] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 542.536237] env[61852]: self.force_reraise()
[ 542.536237] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 542.536237] env[61852]: raise self.value
[ 542.536237] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 542.536237] env[61852]: updated_port = self._update_port(
[ 542.536237] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 542.536237] env[61852]: _ensure_no_port_binding_failure(port)
[ 542.536237] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 542.536237] env[61852]: raise exception.PortBindingFailed(port_id=port['id'])
[ 542.537227] env[61852]: nova.exception.PortBindingFailed: Binding failed for port 9fd5ccc7-d886-4008-8822-f7e5ae8d217f, please check neutron logs for more information.
[ 542.537227] env[61852]: Removing descriptor: 18
[ 542.537227] env[61852]: ERROR nova.compute.manager [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9fd5ccc7-d886-4008-8822-f7e5ae8d217f, please check neutron logs for more information.
[ 542.537227] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Traceback (most recent call last):
[ 542.537227] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources
[ 542.537227] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] yield resources
[ 542.537227] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 542.537227] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] self.driver.spawn(context, instance, image_meta,
[ 542.537227] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn
[ 542.537227] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 542.537227] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 542.537227] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] vm_ref = self.build_virtual_machine(instance,
[ 542.537621] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 542.537621] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] vif_infos = vmwarevif.get_vif_info(self._session,
[ 542.537621] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 542.537621] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] for vif in network_info:
[ 542.537621] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 542.537621] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] return self._sync_wrapper(fn, *args, **kwargs)
[ 542.537621] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 542.537621] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] self.wait()
[ 542.537621] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 542.537621] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] self[:] = self._gt.wait()
[ 542.537621] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 542.537621] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] return self._exit_event.wait()
[ 542.537621] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 542.538036] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] result = hub.switch()
[ 542.538036] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 542.538036] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] return self.greenlet.switch()
[ 542.538036] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 542.538036] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] result = function(*args, **kwargs)
[ 542.538036] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 542.538036] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] return func(*args, **kwargs)
[ 542.538036] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 542.538036] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] raise e
[ 542.538036] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 542.538036] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] nwinfo = self.network_api.allocate_for_instance(
[ 542.538036] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 542.538036] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] created_port_ids = self._update_ports_for_instance(
[ 542.538355] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 542.538355] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] with excutils.save_and_reraise_exception():
[ 542.538355] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 542.538355] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] self.force_reraise()
[ 542.538355] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 542.538355] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] raise self.value
[ 542.538355] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 542.538355] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] updated_port = self._update_port(
[ 542.538355] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 542.538355] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] _ensure_no_port_binding_failure(port)
[ 542.538355] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 542.538355] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] raise exception.PortBindingFailed(port_id=port['id'])
[ 542.538696] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] nova.exception.PortBindingFailed: Binding failed for port 9fd5ccc7-d886-4008-8822-f7e5ae8d217f, please check neutron logs for more information.
[ 542.538696] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204]
[ 542.538696] env[61852]: INFO nova.compute.manager [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Terminating instance
[ 542.539821] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Acquiring lock "refresh_cache-f112b2be-fbd7-4a01-b369-25fe490e4204" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 542.539981] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Acquired lock "refresh_cache-f112b2be-fbd7-4a01-b369-25fe490e4204" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 542.540174] env[61852]: DEBUG nova.network.neutron [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 542.845124] env[61852]: DEBUG nova.compute.manager [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 543.019766] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52b027d3-72db-9d66-997f-542493284b24, 'name': SearchDatastore_Task, 'duration_secs': 0.010711} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
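
The excutils frames in the PortBindingFailed tracebacks above (__exit__, force_reraise, raise self.value) are oslo.utils' save_and_reraise_exception: cleanup runs when the wrapped block fails, then the original exception is re-raised unchanged, which is why PortBindingFailed survives intact through _update_ports_for_instance. Its shape:

from oslo_utils import excutils

def update_ports():
    try:
        raise ValueError("Binding failed for port ...")
    except Exception:
        with excutils.save_and_reraise_exception():
            # Cleanup goes here; on exit the saved exception is
            # re-raised (the force_reraise frame in the traceback).
            print("cleanup: roll back partially created ports")

try:
    update_ports()
except ValueError as exc:
    print("original exception preserved:", exc)
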
[ 543.020989] env[61852]: DEBUG oslo_concurrency.lockutils [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 543.020989] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] d75e131b-1933-4e1f-bcf1-62ed83779177/d75e131b-1933-4e1f-bcf1-62ed83779177.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 543.020989] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1ac5dbc8-a25b-4201-803c-9b3d8b470180 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 543.027798] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Waiting for the task: (returnval){
[ 543.027798] env[61852]: value = "task-1292657"
[ 543.027798] env[61852]: _type = "Task"
[ 543.027798] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 543.037930] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292657, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 543.083706] env[61852]: DEBUG nova.network.neutron [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
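
The copy above shows the VMware image-cache layout: the base image is fetched once per datastore into the cache directory, and each instance then gets its own working copy under its UUID. Hypothetical helpers that simply reproduce the paths in the log (the cache directory name is configurable; this devstack uses devstack-image-cache_base):

def cached_image_path(datastore, cache_dir, image_id):
    # One cached copy per image per datastore.
    return f"[{datastore}] {cache_dir}/{image_id}/{image_id}.vmdk"

def instance_disk_path(datastore, instance_uuid):
    # Per-instance working copy, named after the instance UUID.
    return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

print(cached_image_path("datastore1", "devstack-image-cache_base",
                        "90fd8f39-16b3-43e0-a682-0ec131005e31"))
print(instance_disk_path("datastore1",
                         "d75e131b-1933-4e1f-bcf1-62ed83779177"))
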
[ 543.165018] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47e1a5f4-d25e-4cd8-9133-4b655f87369a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 543.174263] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ed088ec-89e8-4171-b92d-0201f72dbc31 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 543.218753] env[61852]: DEBUG nova.network.neutron [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Successfully created port: 0de7a94b-007f-4544-a3de-0dd7ab89898c {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 543.221204] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-230e32b9-3699-436c-99f4-259363e0b5ac {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 543.229217] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b48f7215-38f5-4d3f-9427-bf7f4c7dc140 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 543.245948] env[61852]: DEBUG nova.compute.provider_tree [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 543.254469] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Acquiring lock "068ced45-4c50-4cfd-bd94-fa1dad29e5b5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 543.254721] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Lock "068ced45-4c50-4cfd-bd94-fa1dad29e5b5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 543.269329] env[61852]: DEBUG nova.compute.manager [req-a3c5b0cb-f4ee-42d7-98b5-d4852bb1c5db req-e31e3075-d6bd-4958-a8aa-d7e595a571d8 service nova] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Received event network-changed-9fd5ccc7-d886-4008-8822-f7e5ae8d217f {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 543.269548] env[61852]: DEBUG nova.compute.manager [req-a3c5b0cb-f4ee-42d7-98b5-d4852bb1c5db req-e31e3075-d6bd-4958-a8aa-d7e595a571d8 service nova] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Refreshing instance network info cache due to event network-changed-9fd5ccc7-d886-4008-8822-f7e5ae8d217f. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
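
Entries tagged [req-... req-... service nova] are server-external events: Neutron calls Nova's os-server-external-events API when a port changes, and the compute manager routes the event to the instance's host (here triggering the network info cache refresh). The request body looks roughly like this; endpoint discovery and auth handling are omitted:

# Illustrative payload for POST /v2.1/os-server-external-events,
# matching the network-changed event received above.
event = {
    "events": [{
        "name": "network-changed",
        "server_uuid": "f112b2be-fbd7-4a01-b369-25fe490e4204",
        "tag": "9fd5ccc7-d886-4008-8822-f7e5ae8d217f",  # the port id
    }]
}
print(event)
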
[ 543.269762] env[61852]: DEBUG oslo_concurrency.lockutils [req-a3c5b0cb-f4ee-42d7-98b5-d4852bb1c5db req-e31e3075-d6bd-4958-a8aa-d7e595a571d8 service nova] Acquiring lock "refresh_cache-f112b2be-fbd7-4a01-b369-25fe490e4204" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 543.422541] env[61852]: DEBUG nova.network.neutron [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 543.541963] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292657, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 543.753335] env[61852]: DEBUG nova.scheduler.client.report [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 543.861878] env[61852]: DEBUG nova.compute.manager [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 543.889636] env[61852]: DEBUG nova.virt.hardware [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 543.889887] env[61852]: DEBUG nova.virt.hardware [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 543.890253] env[61852]: DEBUG nova.virt.hardware [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 543.890615] env[61852]: DEBUG nova.virt.hardware [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 543.890776] env[61852]: DEBUG nova.virt.hardware [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 543.891222] env[61852]: DEBUG nova.virt.hardware [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 543.891222] env[61852]: DEBUG nova.virt.hardware [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 543.891424] env[61852]: DEBUG nova.virt.hardware [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 543.891657] env[61852]: DEBUG nova.virt.hardware [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 543.891832] env[61852]: DEBUG nova.virt.hardware [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 543.892018] env[61852]: DEBUG nova.virt.hardware [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 543.893145] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9153f2e4-dd82-4b69-b670-aa1e876ab866 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 543.904751] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e29fdb-d457-4aed-973b-6c1a01111d3e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 543.925373] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Releasing lock "refresh_cache-f112b2be-fbd7-4a01-b369-25fe490e4204" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 543.926155] env[61852]: DEBUG nova.compute.manager [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 543.926155] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 543.926420] env[61852]: DEBUG oslo_concurrency.lockutils [req-a3c5b0cb-f4ee-42d7-98b5-d4852bb1c5db req-e31e3075-d6bd-4958-a8aa-d7e595a571d8 service nova] Acquired lock "refresh_cache-f112b2be-fbd7-4a01-b369-25fe490e4204" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 543.926623] env[61852]: DEBUG nova.network.neutron [req-a3c5b0cb-f4ee-42d7-98b5-d4852bb1c5db req-e31e3075-d6bd-4958-a8aa-d7e595a571d8 service nova] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Refreshing network info cache for port 9fd5ccc7-d886-4008-8822-f7e5ae8d217f {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 543.927611] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9d4e8d2e-e7ff-42df-938a-14aba3efcafb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 543.939804] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07cc1f8b-4ecb-4924-8570-c4bba9bdcacf {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 543.963540] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f112b2be-fbd7-4a01-b369-25fe490e4204 could not be found.
[ 543.963772] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 543.964071] env[61852]: INFO nova.compute.manager [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 543.964217] env[61852]: DEBUG oslo.service.loopingcall [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
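
The oslo.service loopingcall line wraps network deallocation in a retry loop: a looping call re-invokes the function on an interval until it signals completion by raising LoopingCallDone. A minimal sketch of that primitive (the retry/give-up policy Nova layers on top is omitted):

from oslo_service import loopingcall

attempts = {"n": 0}

def deallocate_network_with_retries():
    attempts["n"] += 1
    if attempts["n"] < 3:
        return  # pretend a retryable failure; run again next interval
    raise loopingcall.LoopingCallDone()  # success: stop looping

timer = loopingcall.FixedIntervalLoopingCall(deallocate_network_with_retries)
timer.start(interval=0.01).wait()
print("deallocated after", attempts["n"], "attempts")
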
[ 543.964481] env[61852]: DEBUG nova.compute.manager [-] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 543.964520] env[61852]: DEBUG nova.network.neutron [-] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 543.999554] env[61852]: DEBUG nova.network.neutron [-] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 544.039322] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292657, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.631671} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 544.039622] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] d75e131b-1933-4e1f-bcf1-62ed83779177/d75e131b-1933-4e1f-bcf1-62ed83779177.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 544.039844] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 544.043020] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5d06000a-265f-478e-9ee0-747e4baee36e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 544.049912] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Waiting for the task: (returnval){
[ 544.049912] env[61852]: value = "task-1292658"
[ 544.049912] env[61852]: _type = "Task"
[ 544.049912] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 544.056758] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292658, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
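
"Extending root virtual disk to 1048576" is a size in KiB: the m1.nano flavor above has root_gb=1, and 1 GiB is 1024 * 1024 KiB. With oslo.utils' binary-unit constants:

from oslo_utils import units

root_gb = 1                    # flavor root_gb from the log above
size_kib = root_gb * units.Mi  # units.Mi == 1024 * 1024
print(size_kib)                # 1048576, matching the log
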
[ 544.258714] env[61852]: DEBUG oslo_concurrency.lockutils [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.433s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 544.262760] env[61852]: DEBUG nova.compute.manager [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 544.266112] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.404s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 544.489956] env[61852]: DEBUG nova.network.neutron [req-a3c5b0cb-f4ee-42d7-98b5-d4852bb1c5db req-e31e3075-d6bd-4958-a8aa-d7e595a571d8 service nova] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 544.503567] env[61852]: DEBUG nova.network.neutron [-] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 544.561739] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292658, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063561} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 544.562064] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 544.563275] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c32e3912-cb31-4d0d-98ea-6f9a32d3de90 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 544.587131] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] d75e131b-1933-4e1f-bcf1-62ed83779177/d75e131b-1933-4e1f-bcf1-62ed83779177.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 544.587455] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f637b1d-ce2a-48a2-9f6f-30b7d97ca54b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 544.608922] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Waiting for the task: (returnval){
[ 544.608922] env[61852]: value = "task-1292659"
[ 544.608922] env[61852]: _type = "Task"
[ 544.608922] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 544.617570] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292659, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 544.769640] env[61852]: DEBUG nova.compute.utils [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}}
[ 544.777927] env[61852]: DEBUG nova.compute.manager [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
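
The ReconfigVM_Task above is how the copied VMDK becomes the VM's disk: the reconfigure spec adds a VirtualDisk device whose backing file is the instance's .vmdk. A rough pyVmomi-style sketch of such a spec (Nova builds the equivalent through oslo.vmware; the controller key and unit number here are illustrative):

from pyVmomi import vim

def attach_disk_spec(vmdk_path, controller_key=1000, unit_number=0):
    disk = vim.vm.device.VirtualDisk()
    disk.backing = vim.vm.device.VirtualDisk.FlatVer2BackingInfo()
    disk.backing.fileName = vmdk_path   # "[datastore1] d75e.../d75e....vmdk"
    disk.backing.diskMode = "persistent"
    disk.controllerKey = controller_key
    disk.unitNumber = unit_number
    change = vim.vm.device.VirtualDeviceSpec()
    change.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
    change.device = disk
    spec = vim.vm.ConfigSpec()
    spec.deviceChange = [change]
    return spec  # submit with vm.ReconfigVM_Task(spec=spec)
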
[ 544.778206] env[61852]: DEBUG nova.network.neutron [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 544.811813] env[61852]: DEBUG nova.network.neutron [req-a3c5b0cb-f4ee-42d7-98b5-d4852bb1c5db req-e31e3075-d6bd-4958-a8aa-d7e595a571d8 service nova] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 544.905285] env[61852]: DEBUG nova.policy [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd032b9d26a72416885d080dc2922e815', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '01d1223a327f4442b01edf22f3fc578a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}}
[ 545.009360] env[61852]: INFO nova.compute.manager [-] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Took 1.04 seconds to deallocate network for instance.
[ 545.013487] env[61852]: DEBUG nova.compute.claims [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 545.013487] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 545.121967] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292659, 'name': ReconfigVM_Task, 'duration_secs': 0.333591} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 545.125814] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Reconfigured VM instance instance-00000007 to attach disk [datastore1] d75e131b-1933-4e1f-bcf1-62ed83779177/d75e131b-1933-4e1f-bcf1-62ed83779177.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 545.126978] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c52b0227-fdcc-4d1b-89d7-17f890b33f13 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 545.130690] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4fafd7c4-2a67-477e-ad72-0843dfd86389 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 545.140117] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e89e5ec4-c08b-4224-b857-8cc1b4f50165 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 545.144429] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Waiting for the task: (returnval){
[ 545.144429] env[61852]: value = "task-1292660"
[ 545.144429] env[61852]: _type = "Task"
[ 545.144429] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 545.173691] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c3b43c7-a703-4283-9c4e-4917bc92fd67 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 545.179602] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292660, 'name': Rename_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 545.184942] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1f25ca9-b55f-491f-864f-dbf02609ef67 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 545.198846] env[61852]: DEBUG nova.compute.provider_tree [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 545.278896] env[61852]: DEBUG nova.compute.manager [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 545.319703] env[61852]: DEBUG oslo_concurrency.lockutils [req-a3c5b0cb-f4ee-42d7-98b5-d4852bb1c5db req-e31e3075-d6bd-4958-a8aa-d7e595a571d8 service nova] Releasing lock "refresh_cache-f112b2be-fbd7-4a01-b369-25fe490e4204" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 545.658311] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292660, 'name': Rename_Task, 'duration_secs': 0.174526} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 545.659598] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 545.659932] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-34b0c2e1-1454-4bb4-98a2-229f60bbf9f5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 545.670036] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Waiting for the task: (returnval){
[ 545.670036] env[61852]: value = "task-1292661"
[ 545.670036] env[61852]: _type = "Task"
[ 545.670036] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 545.679276] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292661, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 545.701499] env[61852]: DEBUG nova.scheduler.client.report [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 546.087143] env[61852]: DEBUG nova.network.neutron [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Successfully created port: bc67f150-5d95-4527-b429-529707a6d170 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 546.136368] env[61852]: DEBUG nova.compute.manager [req-134412ca-937c-4a65-b53f-3f36dcfafe6d req-76d8bbd6-dab1-41d4-b1ae-bab700e4f1d9 service nova] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Received event network-vif-deleted-9fd5ccc7-d886-4008-8822-f7e5ae8d217f {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 546.184196] env[61852]: DEBUG oslo_vmware.api [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292661, 'name': PowerOnVM_Task, 'duration_secs': 0.428217} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 546.184761] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 546.187814] env[61852]: INFO nova.compute.manager [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Took 9.06 seconds to spawn the instance on the hypervisor.
[ 546.187814] env[61852]: DEBUG nova.compute.manager [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 546.187814] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb69b026-5fbf-4c59-b9af-1937cb823618 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.207106] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.941s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 546.207469] env[61852]: ERROR nova.compute.manager [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 50a2ab09-4218-41fd-8374-f1615bf5386a, please check neutron logs for more information. [ 546.207469] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Traceback (most recent call last): [ 546.207469] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 546.207469] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] self.driver.spawn(context, instance, image_meta, [ 546.207469] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 546.207469] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] self._vmops.spawn(context, instance, image_meta, injected_files, [ 546.207469] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 546.207469] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] vm_ref = self.build_virtual_machine(instance, [ 546.207469] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 546.207469] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] vif_infos = vmwarevif.get_vif_info(self._session, [ 546.207469] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 546.207798] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] for vif in network_info: [ 546.207798] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 546.207798] env[61852]: ERROR nova.compute.manager [instance: 
bb04c866-2e19-48e9-9aa5-89af0e56d735] return self._sync_wrapper(fn, *args, **kwargs) [ 546.207798] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 546.207798] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] self.wait() [ 546.207798] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 546.207798] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] self[:] = self._gt.wait() [ 546.207798] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 546.207798] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] return self._exit_event.wait() [ 546.207798] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 546.207798] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] result = hub.switch() [ 546.207798] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 546.207798] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] return self.greenlet.switch() [ 546.208191] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 546.208191] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] result = function(*args, **kwargs) [ 546.208191] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 546.208191] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] return func(*args, **kwargs) [ 546.208191] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 546.208191] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] raise e [ 546.208191] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 546.208191] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] nwinfo = self.network_api.allocate_for_instance( [ 546.208191] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 546.208191] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] created_port_ids = self._update_ports_for_instance( [ 546.208191] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 546.208191] env[61852]: ERROR nova.compute.manager [instance: 
bb04c866-2e19-48e9-9aa5-89af0e56d735] with excutils.save_and_reraise_exception(): [ 546.208191] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 546.208512] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] self.force_reraise() [ 546.208512] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 546.208512] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] raise self.value [ 546.208512] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 546.208512] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] updated_port = self._update_port( [ 546.208512] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 546.208512] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] _ensure_no_port_binding_failure(port) [ 546.208512] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 546.208512] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] raise exception.PortBindingFailed(port_id=port['id']) [ 546.208512] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] nova.exception.PortBindingFailed: Binding failed for port 50a2ab09-4218-41fd-8374-f1615bf5386a, please check neutron logs for more information. [ 546.208512] env[61852]: ERROR nova.compute.manager [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] [ 546.208797] env[61852]: DEBUG nova.compute.utils [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Binding failed for port 50a2ab09-4218-41fd-8374-f1615bf5386a, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 546.209848] env[61852]: DEBUG nova.compute.manager [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Build of instance bb04c866-2e19-48e9-9aa5-89af0e56d735 was re-scheduled: Binding failed for port 50a2ab09-4218-41fd-8374-f1615bf5386a, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 546.210274] env[61852]: DEBUG nova.compute.manager [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 546.210500] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Acquiring lock "refresh_cache-bb04c866-2e19-48e9-9aa5-89af0e56d735" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 546.210645] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Acquired lock "refresh_cache-bb04c866-2e19-48e9-9aa5-89af0e56d735" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 546.210795] env[61852]: DEBUG nova.network.neutron [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 546.211885] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 10.974s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.290522] env[61852]: DEBUG nova.compute.manager [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 546.323884] env[61852]: DEBUG nova.virt.hardware [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 546.323884] env[61852]: DEBUG nova.virt.hardware [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 546.324076] env[61852]: DEBUG nova.virt.hardware [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 546.324187] env[61852]: DEBUG nova.virt.hardware [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 546.324352] env[61852]: DEBUG nova.virt.hardware [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 546.324476] env[61852]: DEBUG nova.virt.hardware [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 546.324675] env[61852]: DEBUG nova.virt.hardware [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 546.325131] env[61852]: DEBUG nova.virt.hardware [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 546.328144] 
env[61852]: DEBUG nova.virt.hardware [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 546.328371] env[61852]: DEBUG nova.virt.hardware [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 546.328561] env[61852]: DEBUG nova.virt.hardware [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 546.329490] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4696e632-ab19-4a1a-be19-eaa7e2f61df4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.340527] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36189cf0-d95f-4423-9f67-b4bcbbe48f24 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.720028] env[61852]: INFO nova.compute.manager [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Took 19.13 seconds to build instance. [ 546.772797] env[61852]: DEBUG nova.network.neutron [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 547.025966] env[61852]: DEBUG nova.compute.manager [req-b69c52b3-a6fe-4f5c-934f-e4332ae78445 req-e71d56a4-3fba-4833-85f8-2377636dbb42 service nova] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Received event network-changed-0de7a94b-007f-4544-a3de-0dd7ab89898c {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 547.026209] env[61852]: DEBUG nova.compute.manager [req-b69c52b3-a6fe-4f5c-934f-e4332ae78445 req-e71d56a4-3fba-4833-85f8-2377636dbb42 service nova] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Refreshing instance network info cache due to event network-changed-0de7a94b-007f-4544-a3de-0dd7ab89898c. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 547.026426] env[61852]: DEBUG oslo_concurrency.lockutils [req-b69c52b3-a6fe-4f5c-934f-e4332ae78445 req-e71d56a4-3fba-4833-85f8-2377636dbb42 service nova] Acquiring lock "refresh_cache-eea17bb5-01e3-4144-a579-2a56be8154c4" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 547.026571] env[61852]: DEBUG oslo_concurrency.lockutils [req-b69c52b3-a6fe-4f5c-934f-e4332ae78445 req-e71d56a4-3fba-4833-85f8-2377636dbb42 service nova] Acquired lock "refresh_cache-eea17bb5-01e3-4144-a579-2a56be8154c4" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 547.026714] env[61852]: DEBUG nova.network.neutron [req-b69c52b3-a6fe-4f5c-934f-e4332ae78445 req-e71d56a4-3fba-4833-85f8-2377636dbb42 service nova] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Refreshing network info cache for port 0de7a94b-007f-4544-a3de-0dd7ab89898c {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 547.037321] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b39a4e-3c89-44b6-87f4-3537d836d053 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.047199] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56d6ebd0-bcb0-4302-9339-56d4792f203d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.082809] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-690645fb-5c2f-4172-9b1e-4159fa40c906 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.090951] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66a532ca-a353-42d6-8318-98f4ac15295d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.111358] env[61852]: DEBUG nova.compute.provider_tree [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 547.123607] env[61852]: DEBUG nova.network.neutron [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 547.225107] env[61852]: DEBUG oslo_concurrency.lockutils [None req-289daa1e-8e6f-45be-b039-273e3a003726 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Lock "d75e131b-1933-4e1f-bcf1-62ed83779177" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.645s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 547.277298] env[61852]: ERROR nova.compute.manager [None 
req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 0de7a94b-007f-4544-a3de-0dd7ab89898c, please check neutron logs for more information. [ 547.277298] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 547.277298] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 547.277298] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 547.277298] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 547.277298] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 547.277298] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 547.277298] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 547.277298] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 547.277298] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 547.277298] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 547.277298] env[61852]: ERROR nova.compute.manager raise self.value [ 547.277298] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 547.277298] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 547.277298] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 547.277298] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 547.277736] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 547.277736] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 547.277736] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 0de7a94b-007f-4544-a3de-0dd7ab89898c, please check neutron logs for more information. 
[ 547.277736] env[61852]: ERROR nova.compute.manager [ 547.277736] env[61852]: Traceback (most recent call last): [ 547.277736] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 547.277736] env[61852]: listener.cb(fileno) [ 547.277736] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 547.277736] env[61852]: result = function(*args, **kwargs) [ 547.277736] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 547.277736] env[61852]: return func(*args, **kwargs) [ 547.277736] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 547.277736] env[61852]: raise e [ 547.277736] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 547.277736] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 547.277736] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 547.277736] env[61852]: created_port_ids = self._update_ports_for_instance( [ 547.277736] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 547.277736] env[61852]: with excutils.save_and_reraise_exception(): [ 547.277736] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 547.277736] env[61852]: self.force_reraise() [ 547.277736] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 547.277736] env[61852]: raise self.value [ 547.277736] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 547.277736] env[61852]: updated_port = self._update_port( [ 547.277736] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 547.277736] env[61852]: _ensure_no_port_binding_failure(port) [ 547.277736] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 547.277736] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 547.278473] env[61852]: nova.exception.PortBindingFailed: Binding failed for port 0de7a94b-007f-4544-a3de-0dd7ab89898c, please check neutron logs for more information. [ 547.278473] env[61852]: Removing descriptor: 16 [ 547.278473] env[61852]: ERROR nova.compute.manager [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 0de7a94b-007f-4544-a3de-0dd7ab89898c, please check neutron logs for more information. 
[ 547.278473] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Traceback (most recent call last): [ 547.278473] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 547.278473] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] yield resources [ 547.278473] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 547.278473] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] self.driver.spawn(context, instance, image_meta, [ 547.278473] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 547.278473] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 547.278473] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 547.278473] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] vm_ref = self.build_virtual_machine(instance, [ 547.278845] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 547.278845] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] vif_infos = vmwarevif.get_vif_info(self._session, [ 547.278845] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 547.278845] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] for vif in network_info: [ 547.278845] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 547.278845] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] return self._sync_wrapper(fn, *args, **kwargs) [ 547.278845] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 547.278845] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] self.wait() [ 547.278845] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 547.278845] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] self[:] = self._gt.wait() [ 547.278845] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 547.278845] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] return self._exit_event.wait() [ 547.278845] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 547.279181] env[61852]: ERROR 
nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] result = hub.switch() [ 547.279181] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 547.279181] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] return self.greenlet.switch() [ 547.279181] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 547.279181] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] result = function(*args, **kwargs) [ 547.279181] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 547.279181] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] return func(*args, **kwargs) [ 547.279181] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 547.279181] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] raise e [ 547.279181] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 547.279181] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] nwinfo = self.network_api.allocate_for_instance( [ 547.279181] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 547.279181] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] created_port_ids = self._update_ports_for_instance( [ 547.279505] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 547.279505] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] with excutils.save_and_reraise_exception(): [ 547.279505] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 547.279505] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] self.force_reraise() [ 547.279505] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 547.279505] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] raise self.value [ 547.279505] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 547.279505] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] updated_port = self._update_port( [ 547.279505] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 547.279505] 
env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] _ensure_no_port_binding_failure(port) [ 547.279505] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 547.279505] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] raise exception.PortBindingFailed(port_id=port['id']) [ 547.279810] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] nova.exception.PortBindingFailed: Binding failed for port 0de7a94b-007f-4544-a3de-0dd7ab89898c, please check neutron logs for more information. [ 547.279810] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] [ 547.279810] env[61852]: INFO nova.compute.manager [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Terminating instance [ 547.281460] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "refresh_cache-eea17bb5-01e3-4144-a579-2a56be8154c4" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 547.614495] env[61852]: DEBUG nova.scheduler.client.report [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 547.619116] env[61852]: DEBUG nova.network.neutron [req-b69c52b3-a6fe-4f5c-934f-e4332ae78445 req-e71d56a4-3fba-4833-85f8-2377636dbb42 service nova] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 547.627031] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Releasing lock "refresh_cache-bb04c866-2e19-48e9-9aa5-89af0e56d735" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 547.627287] env[61852]: DEBUG nova.compute.manager [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 547.627517] env[61852]: DEBUG nova.compute.manager [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 547.627760] env[61852]: DEBUG nova.network.neutron [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 547.728092] env[61852]: DEBUG nova.compute.manager [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 547.786387] env[61852]: DEBUG nova.network.neutron [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 547.922926] env[61852]: DEBUG nova.network.neutron [req-b69c52b3-a6fe-4f5c-934f-e4332ae78445 req-e71d56a4-3fba-4833-85f8-2377636dbb42 service nova] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 548.125963] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.914s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 548.127874] env[61852]: ERROR nova.compute.manager [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9d81a11d-ffc6-477b-8822-d2ca7437b877, please check neutron logs for more information. 
[ 548.127874] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Traceback (most recent call last): [ 548.127874] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 548.127874] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] self.driver.spawn(context, instance, image_meta, [ 548.127874] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 548.127874] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 548.127874] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 548.127874] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] vm_ref = self.build_virtual_machine(instance, [ 548.127874] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 548.127874] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] vif_infos = vmwarevif.get_vif_info(self._session, [ 548.127874] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 548.128246] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] for vif in network_info: [ 548.128246] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 548.128246] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] return self._sync_wrapper(fn, *args, **kwargs) [ 548.128246] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 548.128246] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] self.wait() [ 548.128246] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 548.128246] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] self[:] = self._gt.wait() [ 548.128246] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 548.128246] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] return self._exit_event.wait() [ 548.128246] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 548.128246] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] result = hub.switch() [ 548.128246] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
548.128246] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] return self.greenlet.switch() [ 548.128554] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 548.128554] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] result = function(*args, **kwargs) [ 548.128554] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 548.128554] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] return func(*args, **kwargs) [ 548.128554] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 548.128554] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] raise e [ 548.128554] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 548.128554] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] nwinfo = self.network_api.allocate_for_instance( [ 548.128554] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 548.128554] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] created_port_ids = self._update_ports_for_instance( [ 548.128554] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 548.128554] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] with excutils.save_and_reraise_exception(): [ 548.128554] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 548.128948] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] self.force_reraise() [ 548.128948] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 548.128948] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] raise self.value [ 548.128948] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 548.128948] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] updated_port = self._update_port( [ 548.128948] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 548.128948] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] _ensure_no_port_binding_failure(port) [ 548.128948] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 548.128948] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] raise exception.PortBindingFailed(port_id=port['id']) [ 548.128948] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] nova.exception.PortBindingFailed: Binding failed for port 9d81a11d-ffc6-477b-8822-d2ca7437b877, please check neutron logs for more information. [ 548.128948] env[61852]: ERROR nova.compute.manager [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] [ 548.129291] env[61852]: DEBUG nova.compute.utils [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Binding failed for port 9d81a11d-ffc6-477b-8822-d2ca7437b877, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 548.132334] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 11.552s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.136794] env[61852]: DEBUG nova.compute.manager [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Build of instance 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc was re-scheduled: Binding failed for port 9d81a11d-ffc6-477b-8822-d2ca7437b877, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 548.137315] env[61852]: DEBUG nova.compute.manager [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 548.137541] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Acquiring lock "refresh_cache-62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.137802] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Acquired lock "refresh_cache-62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 548.137916] env[61852]: DEBUG nova.network.neutron [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 548.238013] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Acquiring lock "8c872e97-44ca-48c9-b7bb-02dca695ad8a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 548.238267] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Lock "8c872e97-44ca-48c9-b7bb-02dca695ad8a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.263136] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 548.291345] env[61852]: DEBUG nova.network.neutron [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 548.428609] env[61852]: DEBUG oslo_concurrency.lockutils [req-b69c52b3-a6fe-4f5c-934f-e4332ae78445 
req-e71d56a4-3fba-4833-85f8-2377636dbb42 service nova] Releasing lock "refresh_cache-eea17bb5-01e3-4144-a579-2a56be8154c4" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 548.429672] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquired lock "refresh_cache-eea17bb5-01e3-4144-a579-2a56be8154c4" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 548.429672] env[61852]: DEBUG nova.network.neutron [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 548.711144] env[61852]: DEBUG nova.network.neutron [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 548.792809] env[61852]: INFO nova.compute.manager [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] Took 1.16 seconds to deallocate network for instance. [ 548.933364] env[61852]: DEBUG nova.network.neutron [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 549.025134] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7d1ee1c-0f24-4ae0-bdd6-fc4668cd4b63 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.034915] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ea638bb-23a8-49db-bc7d-ed294d220337 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.040132] env[61852]: INFO nova.compute.manager [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Rebuilding instance [ 549.075323] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f50c8a-c9c9-4d02-abfe-a30bfb0b04a8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.084994] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a20b401-a8ac-4ad3-81c9-a890b2ad19ee {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.107966] env[61852]: DEBUG nova.compute.provider_tree [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 
tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 549.119513] env[61852]: DEBUG nova.compute.manager [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 549.120342] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ea1317-3920-4452-bec3-6d99a32a80e5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.138424] env[61852]: DEBUG nova.network.neutron [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 549.412550] env[61852]: DEBUG nova.network.neutron [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 549.438194] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Releasing lock "refresh_cache-62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 549.438351] env[61852]: DEBUG nova.compute.manager [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 549.438515] env[61852]: DEBUG nova.compute.manager [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 549.438681] env[61852]: DEBUG nova.network.neutron [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 549.464518] env[61852]: DEBUG nova.network.neutron [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 549.610761] env[61852]: DEBUG nova.scheduler.client.report [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 549.636163] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 549.636163] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4baf3705-a2f8-4736-af6e-1349b8f28881 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.646670] env[61852]: DEBUG oslo_vmware.api [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Waiting for the task: (returnval){ [ 549.646670] env[61852]: value = "task-1292662" [ 549.646670] env[61852]: _type = "Task" [ 549.646670] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 549.662506] env[61852]: DEBUG oslo_vmware.api [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292662, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 549.699301] env[61852]: ERROR nova.compute.manager [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port bc67f150-5d95-4527-b429-529707a6d170, please check neutron logs for more information. 
[ 549.699301] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 549.699301] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 549.699301] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 549.699301] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 549.699301] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 549.699301] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 549.699301] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 549.699301] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 549.699301] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 549.699301] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 549.699301] env[61852]: ERROR nova.compute.manager raise self.value [ 549.699301] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 549.699301] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 549.699301] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 549.699301] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 549.699775] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 549.699775] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 549.699775] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port bc67f150-5d95-4527-b429-529707a6d170, please check neutron logs for more information. 
[ 549.699775] env[61852]: ERROR nova.compute.manager [ 549.699775] env[61852]: Traceback (most recent call last): [ 549.699775] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 549.699775] env[61852]: listener.cb(fileno) [ 549.699775] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 549.699775] env[61852]: result = function(*args, **kwargs) [ 549.699775] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 549.699775] env[61852]: return func(*args, **kwargs) [ 549.699775] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 549.699775] env[61852]: raise e [ 549.699775] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 549.699775] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 549.699775] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 549.699775] env[61852]: created_port_ids = self._update_ports_for_instance( [ 549.699775] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 549.699775] env[61852]: with excutils.save_and_reraise_exception(): [ 549.699775] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 549.699775] env[61852]: self.force_reraise() [ 549.699775] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 549.699775] env[61852]: raise self.value [ 549.699775] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 549.699775] env[61852]: updated_port = self._update_port( [ 549.699775] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 549.699775] env[61852]: _ensure_no_port_binding_failure(port) [ 549.699775] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 549.699775] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 549.700540] env[61852]: nova.exception.PortBindingFailed: Binding failed for port bc67f150-5d95-4527-b429-529707a6d170, please check neutron logs for more information. [ 549.700540] env[61852]: Removing descriptor: 19 [ 549.700540] env[61852]: ERROR nova.compute.manager [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port bc67f150-5d95-4527-b429-529707a6d170, please check neutron logs for more information. 
[ 549.700540] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Traceback (most recent call last): [ 549.700540] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 549.700540] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] yield resources [ 549.700540] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 549.700540] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] self.driver.spawn(context, instance, image_meta, [ 549.700540] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 549.700540] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 549.700540] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 549.700540] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] vm_ref = self.build_virtual_machine(instance, [ 549.701097] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 549.701097] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] vif_infos = vmwarevif.get_vif_info(self._session, [ 549.701097] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 549.701097] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] for vif in network_info: [ 549.701097] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 549.701097] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] return self._sync_wrapper(fn, *args, **kwargs) [ 549.701097] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 549.701097] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] self.wait() [ 549.701097] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 549.701097] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] self[:] = self._gt.wait() [ 549.701097] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 549.701097] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] return self._exit_event.wait() [ 549.701097] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 549.701703] env[61852]: ERROR 
nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] result = hub.switch() [ 549.701703] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 549.701703] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] return self.greenlet.switch() [ 549.701703] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 549.701703] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] result = function(*args, **kwargs) [ 549.701703] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 549.701703] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] return func(*args, **kwargs) [ 549.701703] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 549.701703] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] raise e [ 549.701703] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 549.701703] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] nwinfo = self.network_api.allocate_for_instance( [ 549.701703] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 549.701703] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] created_port_ids = self._update_ports_for_instance( [ 549.702095] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 549.702095] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] with excutils.save_and_reraise_exception(): [ 549.702095] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 549.702095] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] self.force_reraise() [ 549.702095] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 549.702095] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] raise self.value [ 549.702095] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 549.702095] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] updated_port = self._update_port( [ 549.702095] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 549.702095] 
env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] _ensure_no_port_binding_failure(port) [ 549.702095] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 549.702095] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] raise exception.PortBindingFailed(port_id=port['id']) [ 549.702423] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] nova.exception.PortBindingFailed: Binding failed for port bc67f150-5d95-4527-b429-529707a6d170, please check neutron logs for more information. [ 549.702423] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] [ 549.702423] env[61852]: INFO nova.compute.manager [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Terminating instance [ 549.704789] env[61852]: DEBUG oslo_concurrency.lockutils [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Acquiring lock "refresh_cache-c0d84943-8398-401d-ac7b-f4436bb8325f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 549.704938] env[61852]: DEBUG oslo_concurrency.lockutils [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Acquired lock "refresh_cache-c0d84943-8398-401d-ac7b-f4436bb8325f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 549.705151] env[61852]: DEBUG nova.network.neutron [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 549.826916] env[61852]: INFO nova.scheduler.client.report [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Deleted allocations for instance bb04c866-2e19-48e9-9aa5-89af0e56d735 [ 549.915938] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Releasing lock "refresh_cache-eea17bb5-01e3-4144-a579-2a56be8154c4" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 549.918180] env[61852]: DEBUG nova.compute.manager [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 549.918448] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 549.918795] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b2c8cf1a-6f3e-415c-a7e3-58179c2949ed {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.932267] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a84857-1b3f-49f1-87f8-d7cec174ec22 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.958390] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance eea17bb5-01e3-4144-a579-2a56be8154c4 could not be found. [ 549.958495] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 549.958667] env[61852]: INFO nova.compute.manager [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Took 0.04 seconds to destroy the instance on the hypervisor. [ 549.958944] env[61852]: DEBUG oslo.service.loopingcall [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 549.959097] env[61852]: DEBUG nova.compute.manager [-] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 549.959180] env[61852]: DEBUG nova.network.neutron [-] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 549.967214] env[61852]: DEBUG nova.network.neutron [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 549.991709] env[61852]: DEBUG nova.network.neutron [-] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 550.121801] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.990s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 550.122788] env[61852]: ERROR nova.compute.manager [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e413caaf-18fa-4232-9c03-9dae4bc25546, please check neutron logs for more information. [ 550.122788] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Traceback (most recent call last): [ 550.122788] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 550.122788] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] self.driver.spawn(context, instance, image_meta, [ 550.122788] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 550.122788] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 550.122788] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 550.122788] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] vm_ref = self.build_virtual_machine(instance, [ 550.122788] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 550.122788] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] vif_infos = vmwarevif.get_vif_info(self._session, [ 550.122788] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 550.123145] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] for vif in network_info: [ 550.123145] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 550.123145] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] return self._sync_wrapper(fn, *args, **kwargs) [ 550.123145] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 550.123145] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] self.wait() [ 550.123145] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 550.123145] env[61852]: 
ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] self[:] = self._gt.wait() [ 550.123145] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 550.123145] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] return self._exit_event.wait() [ 550.123145] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 550.123145] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] result = hub.switch() [ 550.123145] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 550.123145] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] return self.greenlet.switch() [ 550.123463] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 550.123463] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] result = function(*args, **kwargs) [ 550.123463] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 550.123463] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] return func(*args, **kwargs) [ 550.123463] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 550.123463] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] raise e [ 550.123463] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 550.123463] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] nwinfo = self.network_api.allocate_for_instance( [ 550.123463] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 550.123463] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] created_port_ids = self._update_ports_for_instance( [ 550.123463] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 550.123463] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] with excutils.save_and_reraise_exception(): [ 550.123463] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 550.123851] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] self.force_reraise() [ 550.123851] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", 
line 200, in force_reraise [ 550.123851] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] raise self.value [ 550.123851] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 550.123851] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] updated_port = self._update_port( [ 550.123851] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 550.123851] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] _ensure_no_port_binding_failure(port) [ 550.123851] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 550.123851] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] raise exception.PortBindingFailed(port_id=port['id']) [ 550.123851] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] nova.exception.PortBindingFailed: Binding failed for port e413caaf-18fa-4232-9c03-9dae4bc25546, please check neutron logs for more information. [ 550.123851] env[61852]: ERROR nova.compute.manager [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] [ 550.127838] env[61852]: DEBUG nova.compute.utils [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Binding failed for port e413caaf-18fa-4232-9c03-9dae4bc25546, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 550.127838] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 13.067s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.131121] env[61852]: DEBUG nova.compute.manager [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Build of instance e377c443-91b5-4d99-a0e8-a9731421a39e was re-scheduled: Binding failed for port e413caaf-18fa-4232-9c03-9dae4bc25546, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 550.131121] env[61852]: DEBUG nova.compute.manager [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 550.131121] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Acquiring lock "refresh_cache-e377c443-91b5-4d99-a0e8-a9731421a39e" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 550.131121] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Acquired lock "refresh_cache-e377c443-91b5-4d99-a0e8-a9731421a39e" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 550.131344] env[61852]: DEBUG nova.network.neutron [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 550.159426] env[61852]: DEBUG oslo_vmware.api [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292662, 'name': PowerOffVM_Task, 'duration_secs': 0.118101} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 550.159721] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 550.159968] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 550.160801] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83ff58cc-9f95-483c-ac26-09f610095251 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.170074] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 550.170328] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a23f333e-5943-4f92-a288-dbcf15f12e86 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.196585] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 550.197218] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 550.197295] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Deleting the datastore file [datastore1] d75e131b-1933-4e1f-bcf1-62ed83779177 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 550.197601] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1faa5e13-0519-4e5c-8404-448bc6ff5195 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.207333] env[61852]: DEBUG oslo_vmware.api [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Waiting for the task: (returnval){ [ 550.207333] env[61852]: value = "task-1292664" [ 550.207333] env[61852]: _type = "Task" [ 550.207333] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.220055] env[61852]: DEBUG oslo_vmware.api [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292664, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 550.243897] env[61852]: DEBUG nova.network.neutron [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 550.304213] env[61852]: DEBUG nova.compute.manager [req-f5c85e74-4ea7-474f-9e11-d0e8c9b63172 req-9d4db213-2fd7-4abe-80e1-34cb6ad716f1 service nova] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Received event network-vif-deleted-0de7a94b-007f-4544-a3de-0dd7ab89898c {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 550.336121] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea5d1a3e-3fe1-4d1a-86d8-7f50fc3dc6a1 tempest-FloatingIPsAssociationNegativeTestJSON-781350432 tempest-FloatingIPsAssociationNegativeTestJSON-781350432-project-member] Lock "bb04c866-2e19-48e9-9aa5-89af0e56d735" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.105s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 550.338516] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "bb04c866-2e19-48e9-9aa5-89af0e56d735" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 21.104s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.339090] env[61852]: INFO nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: bb04c866-2e19-48e9-9aa5-89af0e56d735] During sync_power_state the instance has a pending task (spawning). Skip. 
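Every PortBindingFailed traceback above bottoms out in the same frame: _ensure_no_port_binding_failure at /opt/stack/nova/nova/network/neutron.py line 294, reached from _update_port (line 585) for each port Nova asks Neutron to bind. Below is a minimal sketch of that check, reconstructed from the tracebacks; the trigger condition (Neutron setting the port's binding:vif_type to 'binding_failed' when no mechanism driver can bind it) is an assumption about the source at these line numbers, not something the log itself records.

    # Sketch of nova/network/neutron.py::_ensure_no_port_binding_failure.
    # Only the raise at line 294 is visible in the tracebacks above; the
    # 'binding_failed' vif-type condition is assumed.
    from nova import exception

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # value Neutron reports for a failed bind

    def _ensure_no_port_binding_failure(port):
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            # The raise seen at neutron.py:294 in every traceback above.
            raise exception.PortBindingFailed(port_id=port['id'])

The rest of each dump is that exception unwinding through save_and_reraise_exception, _update_ports_for_instance and allocate_for_instance back into _build_and_run_instance, which is why the affected builds are re-scheduled ("Build of instance ... was re-scheduled") rather than failed outright.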
[ 550.339090] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "bb04c866-2e19-48e9-9aa5-89af0e56d735" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 550.360246] env[61852]: DEBUG oslo_concurrency.lockutils [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Acquiring lock "5d89c8de-69f9-432d-bb64-46d662097463" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.360666] env[61852]: DEBUG oslo_concurrency.lockutils [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Lock "5d89c8de-69f9-432d-bb64-46d662097463" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.386578] env[61852]: DEBUG nova.network.neutron [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 550.470069] env[61852]: INFO nova.compute.manager [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] Took 1.03 seconds to deallocate network for instance. [ 550.494244] env[61852]: DEBUG nova.network.neutron [-] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 550.657198] env[61852]: DEBUG nova.network.neutron [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 550.715590] env[61852]: DEBUG oslo_vmware.api [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292664, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.101146} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 550.715855] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 550.716282] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 550.716538] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 550.766280] env[61852]: DEBUG nova.network.neutron [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 550.842536] env[61852]: DEBUG nova.compute.manager [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 550.846169] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Acquiring lock "ab92661d-d5e3-4e7a-b6c3-48d48bf795b3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.846169] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Lock "ab92661d-d5e3-4e7a-b6c3-48d48bf795b3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.893634] env[61852]: DEBUG oslo_concurrency.lockutils [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Releasing lock "refresh_cache-c0d84943-8398-401d-ac7b-f4436bb8325f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 550.894067] env[61852]: DEBUG nova.compute.manager [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 550.894256] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 550.894605] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4c24167f-9576-4cef-9597-9b44834ba04c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.905214] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f13512c-32c1-48d5-8a05-33d3a99ca13f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.927897] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c0d84943-8398-401d-ac7b-f4436bb8325f could not be found. [ 550.927897] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 550.928089] env[61852]: INFO nova.compute.manager [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Took 0.03 seconds to destroy the instance on the hypervisor. [ 550.928317] env[61852]: DEBUG oslo.service.loopingcall [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 550.928615] env[61852]: DEBUG nova.compute.manager [-] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 550.928615] env[61852]: DEBUG nova.network.neutron [-] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 550.958803] env[61852]: DEBUG nova.network.neutron [-] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 551.000049] env[61852]: INFO nova.compute.manager [-] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Took 1.04 seconds to deallocate network for instance. 
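The teardown entries around task-1292662 and task-1292664 show the driver-side pattern the VMware driver uses throughout this log: invoke an asynchronous vSphere *_Task method (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task), then block in oslo_vmware's wait_for_task while _poll_task reports progress (the "progress is 0%" and "completed successfully" lines at api.py:434/444). A minimal sketch of that pattern follows, with placeholder host, credentials and vm_ref rather than values from this log.

    # Invoke-then-poll sketch behind the PowerOffVM_Task /
    # DeleteDatastoreFile_Task entries above. Host, credentials and vm_ref
    # are placeholders.
    from oslo_vmware import api

    def power_off(session, vm_ref):
        # Starts the asynchronous vSphere task, then blocks while
        # oslo.vmware polls task.info (emitting the "Task: ... progress is
        # N%" lines) until the task succeeds or raises on error.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        return session.wait_for_task(task)

    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10,       # retry transient API faults
        task_poll_interval=0.5)   # seconds between task.info polls

Note also that destroy is effectively idempotent here: when the backing VM is already gone, vmops logs "Instance does not exist on backend: ... InstanceNotFound" at WARNING and still records "Instance destroyed", so network deallocation and claim cleanup proceed either way.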
[ 551.003806] env[61852]: DEBUG nova.compute.claims [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 551.003806] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.269718] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Releasing lock "refresh_cache-e377c443-91b5-4d99-a0e8-a9731421a39e" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 551.269995] env[61852]: DEBUG nova.compute.manager [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 551.270137] env[61852]: DEBUG nova.compute.manager [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 551.270300] env[61852]: DEBUG nova.network.neutron [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 551.324056] env[61852]: DEBUG nova.network.neutron [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 551.373232] env[61852]: DEBUG oslo_concurrency.lockutils [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.464301] env[61852]: DEBUG nova.network.neutron [-] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 551.503171] env[61852]: INFO nova.scheduler.client.report [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Deleted allocations for instance 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc [ 551.681975] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 551.764123] env[61852]: DEBUG oslo_concurrency.lockutils [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Acquiring lock "23f221fd-8f76-4a6f-8189-49d9be9da7e2" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.764123] env[61852]: DEBUG oslo_concurrency.lockutils [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Lock "23f221fd-8f76-4a6f-8189-49d9be9da7e2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.778953] env[61852]: DEBUG nova.virt.hardware [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=<?>,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-15T17:18:24Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies
/opt/stack/nova/nova/virt/hardware.py:563}} [ 551.778953] env[61852]: DEBUG nova.virt.hardware [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 551.779270] env[61852]: DEBUG nova.virt.hardware [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 551.779341] env[61852]: DEBUG nova.virt.hardware [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 551.779473] env[61852]: DEBUG nova.virt.hardware [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 551.779705] env[61852]: DEBUG nova.virt.hardware [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 551.779821] env[61852]: DEBUG nova.virt.hardware [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 551.780309] env[61852]: DEBUG nova.virt.hardware [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 551.780309] env[61852]: DEBUG nova.virt.hardware [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 551.780309] env[61852]: DEBUG nova.virt.hardware [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 551.780539] env[61852]: DEBUG nova.virt.hardware [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 551.782507] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-607c151e-c995-43b0-8052-8f4e591b5ba7 {{(pid=61852) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.790780] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-237ced45-d63b-44ed-bda8-8c59e924bd23 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.806402] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Instance VIF info [] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 551.811355] env[61852]: DEBUG oslo.service.loopingcall [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 551.811424] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 551.812078] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ec017afe-dbff-4056-ac07-fcf807bb3517 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.827450] env[61852]: DEBUG nova.network.neutron [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 551.830074] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 551.830074] env[61852]: value = "task-1292665" [ 551.830074] env[61852]: _type = "Task" [ 551.830074] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.838369] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292665, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.966978] env[61852]: INFO nova.compute.manager [-] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Took 1.04 seconds to deallocate network for instance. 
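The CreateVM_Task exchange above follows oslo.vmware's usual invoke-then-poll contract: invoke_api returns a task reference immediately, and wait_for_task polls it (producing the "progress is N%" lines) until it succeeds or raises on error. A rough sketch of that contract, with placeholder vCenter credentials and managed-object references (none of these values come from this deployment), might be:

    # Illustrative only: host, credentials, and the folder/pool/spec
    # arguments are placeholders; the session and task APIs are oslo.vmware's.
    from oslo_vmware import api

    def create_vm(session, folder_ref, config_spec, pool_ref):
        # invoke_api issues the SOAP call (Folder.CreateVM_Task here) and
        # hands back a task moref rather than a finished result.
        task = session.invoke_api(session.vim, 'CreateVM_Task',
                                  folder_ref, config=config_spec,
                                  pool=pool_ref)
        # wait_for_task then polls the task object every
        # task_poll_interval seconds, logging progress, and returns the
        # task info once the task reaches the 'success' state.
        return session.wait_for_task(task)

    session = api.VMwareAPISession('vc.example.test', 'admin', 'secret',
                                   api_retry_count=10,
                                   task_poll_interval=0.5)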
[ 551.972691] env[61852]: DEBUG nova.compute.claims [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 551.972837] env[61852]: DEBUG oslo_concurrency.lockutils [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.015030] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d9018072-54f8-46cb-9707-d323b47bab0f tempest-ServerDiagnosticsNegativeTest-1784318948 tempest-ServerDiagnosticsNegativeTest-1784318948-project-member] Lock "62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 33.336s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 552.017685] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 22.784s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.018235] env[61852]: INFO nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc] During sync_power_state the instance has a pending task (spawning). Skip. [ 552.019496] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "62e4bcc2-c3fd-4ed2-966c-331f6c5b86dc" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 552.187321] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance e377c443-91b5-4d99-a0e8-a9731421a39e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 552.187321] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 8084d5e2-454b-4003-a9e8-b733fd0322a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 552.187321] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance d75e131b-1933-4e1f-bcf1-62ed83779177 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}.
{{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 552.187321] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance f112b2be-fbd7-4a01-b369-25fe490e4204 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 552.188286] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance eea17bb5-01e3-4144-a579-2a56be8154c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 552.188286] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance c0d84943-8398-401d-ac7b-f4436bb8325f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 552.331384] env[61852]: INFO nova.compute.manager [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] Took 1.06 seconds to deallocate network for instance. [ 552.345116] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292665, 'name': CreateVM_Task, 'duration_secs': 0.321211} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 552.345116] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 552.345116] env[61852]: DEBUG oslo_concurrency.lockutils [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.345116] env[61852]: DEBUG oslo_concurrency.lockutils [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.345392] env[61852]: DEBUG oslo_concurrency.lockutils [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 552.347392] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-824ddf3f-d986-48df-a070-d694072b82fc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.353126] env[61852]: DEBUG oslo_vmware.api [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Waiting for the task: (returnval){ [ 552.353126] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52066a9e-9bf8-83b8-fbc1-d8c791775697" [ 552.353126] env[61852]: _type = "Task" [ 552.353126] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.366935] env[61852]: DEBUG oslo_vmware.api [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52066a9e-9bf8-83b8-fbc1-d8c791775697, 'name': SearchDatastore_Task, 'duration_secs': 0.007961} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 552.367371] env[61852]: DEBUG oslo_concurrency.lockutils [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 552.367605] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 552.367835] env[61852]: DEBUG oslo_concurrency.lockutils [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.367977] env[61852]: DEBUG oslo_concurrency.lockutils [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.368168] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 552.368420] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1c7adfd-da70-4d81-b082-7f037969da47 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.376418] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 552.376418] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 552.377697] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db766db5-8b9c-40c4-b379-e26e26a94864 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.384677] env[61852]: DEBUG oslo_vmware.api [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Waiting for the task: (returnval){ [ 552.384677] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]528da337-ab1f-0499-75a6-2fccaf96b2e3" [ 552.384677] env[61852]: _type = "Task" [ 552.384677] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.394739] env[61852]: DEBUG oslo_vmware.api [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]528da337-ab1f-0499-75a6-2fccaf96b2e3, 'name': SearchDatastore_Task, 'duration_secs': 0.007491} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 552.398018] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6235268-2580-4537-ae0c-c935444afea2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.403392] env[61852]: DEBUG oslo_vmware.api [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Waiting for the task: (returnval){ [ 552.403392] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52a0d270-8899-5403-f384-633761badacf" [ 552.403392] env[61852]: _type = "Task" [ 552.403392] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.412360] env[61852]: DEBUG oslo_vmware.api [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52a0d270-8899-5403-f384-633761badacf, 'name': SearchDatastore_Task, 'duration_secs': 0.008183} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 552.413619] env[61852]: DEBUG oslo_concurrency.lockutils [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 552.413619] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] d75e131b-1933-4e1f-bcf1-62ed83779177/d75e131b-1933-4e1f-bcf1-62ed83779177.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 552.413619] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bcb878b7-f55c-427a-9377-7612676a96c5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.422128] env[61852]: DEBUG oslo_vmware.api [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Waiting for the task: (returnval){ [ 552.422128] env[61852]: value = "task-1292666" [ 552.422128] env[61852]: _type = "Task" [ 552.422128] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.431629] env[61852]: DEBUG oslo_vmware.api [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292666, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.522091] env[61852]: DEBUG nova.compute.manager [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 552.533564] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "26aba610-746f-4a3c-988c-bf5ffa44198f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.533956] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "26aba610-746f-4a3c-988c-bf5ffa44198f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.692608] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance a4736ffe-ad02-444d-bb6e-2cf4f70d64ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 552.940367] env[61852]: DEBUG oslo_vmware.api [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292666, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.065343] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.128186] env[61852]: DEBUG nova.compute.manager [req-29882397-c8e9-4363-a52e-8f3d4f34b879 req-472b0262-526c-45a1-bd10-b9c3a52b413d service nova] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Received event network-changed-bc67f150-5d95-4527-b429-529707a6d170 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 553.128186] env[61852]: DEBUG nova.compute.manager [req-29882397-c8e9-4363-a52e-8f3d4f34b879 req-472b0262-526c-45a1-bd10-b9c3a52b413d service nova] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Refreshing instance network info cache due to event network-changed-bc67f150-5d95-4527-b429-529707a6d170.
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 553.128390] env[61852]: DEBUG oslo_concurrency.lockutils [req-29882397-c8e9-4363-a52e-8f3d4f34b879 req-472b0262-526c-45a1-bd10-b9c3a52b413d service nova] Acquiring lock "refresh_cache-c0d84943-8398-401d-ac7b-f4436bb8325f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.128478] env[61852]: DEBUG oslo_concurrency.lockutils [req-29882397-c8e9-4363-a52e-8f3d4f34b879 req-472b0262-526c-45a1-bd10-b9c3a52b413d service nova] Acquired lock "refresh_cache-c0d84943-8398-401d-ac7b-f4436bb8325f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.128627] env[61852]: DEBUG nova.network.neutron [req-29882397-c8e9-4363-a52e-8f3d4f34b879 req-472b0262-526c-45a1-bd10-b9c3a52b413d service nova] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Refreshing network info cache for port bc67f150-5d95-4527-b429-529707a6d170 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 553.199333] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 553.365707] env[61852]: INFO nova.scheduler.client.report [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Deleted allocations for instance e377c443-91b5-4d99-a0e8-a9731421a39e [ 553.438105] env[61852]: DEBUG oslo_vmware.api [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292666, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.533735} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 553.438379] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] d75e131b-1933-4e1f-bcf1-62ed83779177/d75e131b-1933-4e1f-bcf1-62ed83779177.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 553.438944] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 553.438944] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b4139753-fa7b-4f63-aeee-8d4b4efe49a1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.449889] env[61852]: DEBUG oslo_vmware.api [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Waiting for the task: (returnval){ [ 553.449889] env[61852]: value = "task-1292667" [ 553.449889] env[61852]: _type = "Task" [ 553.449889] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 553.460284] env[61852]: DEBUG oslo_vmware.api [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292667, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.675332] env[61852]: DEBUG nova.network.neutron [req-29882397-c8e9-4363-a52e-8f3d4f34b879 req-472b0262-526c-45a1-bd10-b9c3a52b413d service nova] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 553.704170] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 553.762255] env[61852]: DEBUG oslo_concurrency.lockutils [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Acquiring lock "97c37446-5b86-469a-9b9b-751d0ebea463" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.762554] env[61852]: DEBUG oslo_concurrency.lockutils [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Lock "97c37446-5b86-469a-9b9b-751d0ebea463" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.832192] env[61852]: DEBUG nova.network.neutron [req-29882397-c8e9-4363-a52e-8f3d4f34b879 req-472b0262-526c-45a1-bd10-b9c3a52b413d service nova] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 553.879542] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e1925ce6-bfc8-4dca-a1ed-1682ebb584e5 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Lock "e377c443-91b5-4d99-a0e8-a9731421a39e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 35.203s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 553.883022] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "e377c443-91b5-4d99-a0e8-a9731421a39e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 24.646s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.883022] env[61852]: INFO nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: e377c443-91b5-4d99-a0e8-a9731421a39e] During sync_power_state the instance has a pending task (networking). Skip. [ 553.883022] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "e377c443-91b5-4d99-a0e8-a9731421a39e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 553.968842] env[61852]: DEBUG oslo_vmware.api [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292667, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065598} completed successfully.
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 553.969133] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 553.970197] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44b0152e-7245-458d-92ad-9e17f245cb3b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.990878] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] d75e131b-1933-4e1f-bcf1-62ed83779177/d75e131b-1933-4e1f-bcf1-62ed83779177.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 553.991242] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a913891-8a03-4b2c-bb89-e8f847b4c0a3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.016308] env[61852]: DEBUG oslo_vmware.api [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Waiting for the task: (returnval){ [ 554.016308] env[61852]: value = "task-1292668" [ 554.016308] env[61852]: _type = "Task" [ 554.016308] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.023657] env[61852]: DEBUG oslo_vmware.api [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292668, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.206890] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 4ce41dca-63c6-447d-9c0a-00f9966e0093 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 554.335445] env[61852]: DEBUG oslo_concurrency.lockutils [req-29882397-c8e9-4363-a52e-8f3d4f34b879 req-472b0262-526c-45a1-bd10-b9c3a52b413d service nova] Releasing lock "refresh_cache-c0d84943-8398-401d-ac7b-f4436bb8325f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 554.335679] env[61852]: DEBUG nova.compute.manager [req-29882397-c8e9-4363-a52e-8f3d4f34b879 req-472b0262-526c-45a1-bd10-b9c3a52b413d service nova] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Received event network-vif-deleted-bc67f150-5d95-4527-b429-529707a6d170 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 554.382316] env[61852]: DEBUG nova.compute.manager [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 554.528453] env[61852]: DEBUG oslo_vmware.api [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292668, 'name': ReconfigVM_Task, 'duration_secs': 0.25839} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 554.530039] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Reconfigured VM instance instance-00000007 to attach disk [datastore1] d75e131b-1933-4e1f-bcf1-62ed83779177/d75e131b-1933-4e1f-bcf1-62ed83779177.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 554.532428] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b76651d8-c3ed-4f61-b6ee-17c72cd33c91 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.541942] env[61852]: DEBUG oslo_vmware.api [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Waiting for the task: (returnval){ [ 554.541942] env[61852]: value = "task-1292669" [ 554.541942] env[61852]: _type = "Task" [ 554.541942] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.554778] env[61852]: DEBUG oslo_vmware.api [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292669, 'name': Rename_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.710491] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance e9a7c08d-e021-43d0-b757-6ad0174b4648 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 554.908941] env[61852]: DEBUG oslo_concurrency.lockutils [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.055905] env[61852]: DEBUG oslo_vmware.api [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292669, 'name': Rename_Task, 'duration_secs': 0.129838} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 555.057737] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 555.058071] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9cfb8aa5-2572-4b1e-873f-97385c150594 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.066132] env[61852]: DEBUG oslo_vmware.api [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Waiting for the task: (returnval){ [ 555.066132] env[61852]: value = "task-1292670" [ 555.066132] env[61852]: _type = "Task" [ 555.066132] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.077799] env[61852]: DEBUG oslo_vmware.api [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292670, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.186436] env[61852]: DEBUG oslo_concurrency.lockutils [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Acquiring lock "90251da7-072c-45ff-899b-3fd2e0c06880" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.186866] env[61852]: DEBUG oslo_concurrency.lockutils [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Lock "90251da7-072c-45ff-899b-3fd2e0c06880" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.215568] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance b5f994d9-e0aa-4335-8339-df76a1a032ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 555.580406] env[61852]: DEBUG oslo_vmware.api [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292670, 'name': PowerOnVM_Task, 'duration_secs': 0.440926} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 555.580927] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 555.581197] env[61852]: DEBUG nova.compute.manager [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 555.583103] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb77eb0a-d1ae-46aa-92cb-4b3436a844a6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.718594] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance db41ed39-0fef-48ea-9197-8d3d8844547a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}.
{{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 556.107129] env[61852]: DEBUG oslo_concurrency.lockutils [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.223357] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 556.725934] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 0f6293bd-3096-4deb-a388-9a3e8b2e5926 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 556.923087] env[61852]: INFO nova.compute.manager [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Rebuilding instance [ 556.988027] env[61852]: DEBUG nova.compute.manager [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 556.988027] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05379a8a-a9ec-4cf6-a034-e97d28141921 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.229600] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 068ced45-4c50-4cfd-bd94-fa1dad29e5b5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 557.330784] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Acquiring lock "394a7258-a9e0-4b16-a125-01e8cdfe7026" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.331614] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Lock "394a7258-a9e0-4b16-a125-01e8cdfe7026" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.503330] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 557.503330] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e91de9d5-da8b-49d8-9479-46af86700722 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.511763] env[61852]: DEBUG oslo_vmware.api [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Waiting for the task: (returnval){ [ 557.511763] env[61852]: value = "task-1292671" [ 557.511763] env[61852]: _type = "Task" [ 557.511763] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.523974] env[61852]: DEBUG oslo_vmware.api [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Task: {'id': task-1292671, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.734029] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 8c872e97-44ca-48c9-b7bb-02dca695ad8a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}.
{{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 557.958974] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3123578c-b306-4815-8852-60a87e7ff699 tempest-ServersListShow296Test-1500905678 tempest-ServersListShow296Test-1500905678-project-member] Acquiring lock "39cd0e07-1378-40ae-a406-90c77df15146" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.959290] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3123578c-b306-4815-8852-60a87e7ff699 tempest-ServersListShow296Test-1500905678 tempest-ServersListShow296Test-1500905678-project-member] Lock "39cd0e07-1378-40ae-a406-90c77df15146" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.025303] env[61852]: DEBUG oslo_vmware.api [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Task: {'id': task-1292671, 'name': PowerOffVM_Task, 'duration_secs': 0.232639} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.025612] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 558.025994] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 558.026996] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d331516-f9d7-4416-a8db-baf9852ff0eb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.034626] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 558.034898] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3378d963-cd47-463f-bf8e-f8c6dbeb1993 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.060557] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 558.060774] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786
tempest-ServersAdmin275Test-256889786-project-admin] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 558.060949] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Deleting the datastore file [datastore1] d75e131b-1933-4e1f-bcf1-62ed83779177 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 558.062101] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3f70acf3-6db2-4ec2-a240-76f847567683 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.069121] env[61852]: DEBUG oslo_vmware.api [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Waiting for the task: (returnval){ [ 558.069121] env[61852]: value = "task-1292673" [ 558.069121] env[61852]: _type = "Task" [ 558.069121] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.077222] env[61852]: DEBUG oslo_vmware.api [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Task: {'id': task-1292673, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.238286] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 5d89c8de-69f9-432d-bb64-46d662097463 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 558.589859] env[61852]: DEBUG oslo_vmware.api [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Task: {'id': task-1292673, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.23646} completed successfully. 
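Reading task-1292671 and task-1292673 together, the destroy path for d75e131b-1933-4e1f-bcf1-62ed83779177 proceeds strictly in order: power off, unregister the VM, then delete its datastore directory. A schematic of that ordering, with stub functions standing in for the vSphere API invocations (purely illustrative, not Nova's signatures):

def power_off(vm_ref): pass               # PowerOffVM_Task (task-1292671 above)
def unregister(vm_ref): pass              # UnregisterVM; returns no task object
def delete_datastore_files(ds_path): pass # DeleteDatastoreFile_Task (task-1292673)

def destroy_instance(vm_ref, ds_path):
    power_off(vm_ref)
    unregister(vm_ref)
    delete_datastore_files(ds_path)       # then "Instance destroyed" is logged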
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.589859] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 558.589859] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 558.589859] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 558.743217] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance ab92661d-d5e3-4e7a-b6c3-48d48bf795b3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 558.744388] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=61852) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 558.744388] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=61852) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 559.241412] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3e2a2e5-d19f-4de4-88a9-41d53b7ab23b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.250541] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b8db3a3-b7f1-41dd-8e48-e53ea3c45c7c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.285950] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89ed046c-0e69-4cb8-9cee-6bc3fc07757a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.296537] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6b395ac-a54e-4eb9-833a-fd2263fb973c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.310627] env[61852]: DEBUG nova.compute.provider_tree [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Inventory has not changed in ProviderTree for provider: 
f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 559.646308] env[61852]: DEBUG nova.virt.hardware [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 559.646579] env[61852]: DEBUG nova.virt.hardware [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 559.646865] env[61852]: DEBUG nova.virt.hardware [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 559.646927] env[61852]: DEBUG nova.virt.hardware [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 559.647106] env[61852]: DEBUG nova.virt.hardware [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 559.647268] env[61852]: DEBUG nova.virt.hardware [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 559.647588] env[61852]: DEBUG nova.virt.hardware [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 559.647714] env[61852]: DEBUG nova.virt.hardware [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 559.648142] env[61852]: DEBUG nova.virt.hardware [None 
req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 559.648142] env[61852]: DEBUG nova.virt.hardware [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 559.648349] env[61852]: DEBUG nova.virt.hardware [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 559.649261] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1794ff19-71da-4983-b933-adc078ad183f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.657995] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bdbed93-4f08-439f-8fd2-dcef4376595e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.675023] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Instance VIF info [] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 559.682758] env[61852]: DEBUG oslo.service.loopingcall [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 559.683042] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 559.683264] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2348ead5-aaa0-41a9-8aaf-9b62c8013155 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.701407] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 559.701407] env[61852]: value = "task-1292674" [ 559.701407] env[61852]: _type = "Task" [ 559.701407] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.710328] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292674, 'name': CreateVM_Task} progress is 0%. 
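The nova.virt.hardware lines above enumerate candidate CPU topologies: with no flavor or image constraints, any sockets*cores*threads factorization of the vCPU count under the 65536-per-dimension limits is a candidate, and for the 1-vCPU m1.nano flavor that collapses to the single (1,1,1) topology the log reports. A small sketch of that search (an approximation of the logic, not Nova's actual code):

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topos = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus // s, max_cores) + 1):
            for t in range(1, min(vcpus // (s * c), max_threads) + 1):
                if s * c * t == vcpus:
                    topos.append((s, c, t))
    return topos

print(possible_topologies(1))   # [(1, 1, 1)] -> "Got 1 possible topologies"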
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.816744] env[61852]: DEBUG nova.scheduler.client.report [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 560.212825] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292674, 'name': CreateVM_Task, 'duration_secs': 0.29214} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.213170] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 560.213946] env[61852]: DEBUG oslo_concurrency.lockutils [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 560.214156] env[61852]: DEBUG oslo_concurrency.lockutils [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 560.214506] env[61852]: DEBUG oslo_concurrency.lockutils [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 560.214779] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc2416a4-ef30-4bef-a4c9-b754e5433342 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.220791] env[61852]: DEBUG oslo_vmware.api [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Waiting for the task: (returnval){ [ 560.220791] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52775d04-48e9-d768-e3f0-f0f0e8f31bba" [ 560.220791] env[61852]: _type = "Task" [ 560.220791] env[61852]: } to complete. 
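The resource tracker and placement numbers above are internally consistent: five m1.nano instances (192 MB RAM, 1 GB disk, 1 vCPU each) plus the 512 MB host memory reservation reproduce the final resource view exactly, and placement capacity is the physical total minus the reservation, scaled by the allocation ratio. Worked arithmetic (variable names are mine, not Nova's):

instances = 5                              # "total allocated vcpus: 5"
used_ram_mb = 512 + instances * 192        # 1472, matching "used_ram=1472MB"
used_disk_gb = instances * 1               # 5, matching "used_disk=5GB"
vcpu_capacity = (48 - 0) * 4.0             # 192 schedulable vCPUs (allocation_ratio=4.0)
ram_capacity_mb = (196590 - 512) * 1.0     # reserved RAM excluded before the 1.0 ratio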
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.230768] env[61852]: DEBUG oslo_vmware.api [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52775d04-48e9-d768-e3f0-f0f0e8f31bba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.326412] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61852) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 560.327237] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 10.201s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 560.327237] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.418s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 560.330180] env[61852]: INFO nova.compute.claims [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 560.333513] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 560.333513] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Getting list of instances from cluster (obj){ [ 560.333513] env[61852]: value = "domain-c8" [ 560.333513] env[61852]: _type = "ClusterComputeResource" [ 560.333513] env[61852]: } {{(pid=61852) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 560.340019] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9135b996-478b-4a3a-9b1f-813359b7574a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.349387] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Got total of 1 instances {{(pid=61852) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 560.738200] env[61852]: DEBUG oslo_vmware.api [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52775d04-48e9-d768-e3f0-f0f0e8f31bba, 'name': SearchDatastore_Task, 'duration_secs': 0.008641} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.738515] env[61852]: DEBUG oslo_concurrency.lockutils [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 560.738733] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 560.738965] env[61852]: DEBUG oslo_concurrency.lockutils [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 560.739121] env[61852]: DEBUG oslo_concurrency.lockutils [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 560.739331] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 560.739586] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-347618b2-38c8-42a2-8628-806c37cde911 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.748319] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 560.748441] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 560.749224] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22e0d6be-3a40-4702-8112-9fabcf7b51a3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.757026] env[61852]: DEBUG oslo_vmware.api [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Waiting for the task: (returnval){ [ 560.757026] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52f8c9df-009a-87ef-2a9a-79ff53a19eca" [ 560.757026] env[61852]: _type = "Task" [ 560.757026] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.764174] env[61852]: DEBUG oslo_vmware.api [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52f8c9df-009a-87ef-2a9a-79ff53a19eca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.272085] env[61852]: DEBUG oslo_vmware.api [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52f8c9df-009a-87ef-2a9a-79ff53a19eca, 'name': SearchDatastore_Task, 'duration_secs': 0.008159} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 561.272955] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c5da948-1c5c-4002-b69f-fe042e19af07 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.282388] env[61852]: DEBUG oslo_vmware.api [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Waiting for the task: (returnval){ [ 561.282388] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52ace7e4-35b9-dbf8-03aa-a06b0069d65e" [ 561.282388] env[61852]: _type = "Task" [ 561.282388] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.292747] env[61852]: DEBUG oslo_vmware.api [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52ace7e4-35b9-dbf8-03aa-a06b0069d65e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.298533] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 561.298625] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 561.298764] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Starting heal instance info cache {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 561.299464] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Rebuilding the list of instances to heal {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 561.797618] env[61852]: DEBUG oslo_vmware.api [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52ace7e4-35b9-dbf8-03aa-a06b0069d65e, 'name': SearchDatastore_Task, 'duration_secs': 0.00938} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 561.801910] env[61852]: DEBUG oslo_concurrency.lockutils [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 561.802211] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] d75e131b-1933-4e1f-bcf1-62ed83779177/d75e131b-1933-4e1f-bcf1-62ed83779177.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 561.803971] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-293ff0a0-58bb-44d3-8f8a-f2b678ef9b50 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.808293] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Skipping network cache update for instance because it is Building. {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 561.808516] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Skipping network cache update for instance because it is Building. 
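The SearchDatastore/MakeDirectory/CopyVirtualDisk sequence above implies the image-cache layout: each Glance image is fetched once per datastore into devstack-image-cache_base/<image-id>/<image-id>.vmdk, and each instance then gets its own copy at <instance-uuid>/<instance-uuid>.vmdk. Two helpers that reproduce the paths seen in the log (illustrative helpers, not Nova functions):

def cached_image_path(datastore, image_id):
    return f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"

def instance_disk_path(datastore, instance_uuid):
    return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

# Reproduces the CopyVirtualDisk source and destination logged above:
src = cached_image_path("datastore1", "90fd8f39-16b3-43e0-a682-0ec131005e31")
dst = instance_disk_path("datastore1", "d75e131b-1933-4e1f-bcf1-62ed83779177")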
{{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 561.808694] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Skipping network cache update for instance because it is Building. {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 561.808865] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Skipping network cache update for instance because it is Building. {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 561.809047] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Skipping network cache update for instance because it is Building. {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 561.810178] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "refresh_cache-d75e131b-1933-4e1f-bcf1-62ed83779177" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 561.810388] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquired lock "refresh_cache-d75e131b-1933-4e1f-bcf1-62ed83779177" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 561.810583] env[61852]: DEBUG nova.network.neutron [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Forcefully refreshing network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 561.810803] env[61852]: DEBUG nova.objects.instance [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lazy-loading 'info_cache' on Instance uuid d75e131b-1933-4e1f-bcf1-62ed83779177 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 561.821300] env[61852]: DEBUG oslo_vmware.api [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Waiting for the task: (returnval){ [ 561.821300] env[61852]: value = "task-1292675" [ 561.821300] env[61852]: _type = "Task" [ 561.821300] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.835513] env[61852]: DEBUG oslo_vmware.api [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Task: {'id': task-1292675, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.846606] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cbece57-bd4e-4643-bfae-ff710edce302 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.855806] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82fc240a-ccfb-4984-b386-4361d635b423 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.895116] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbf3a6d6-5875-4be1-a743-90dec5660cfd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.904121] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-919cf261-dc38-41bf-8d0e-255b089d8f7b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.918549] env[61852]: DEBUG nova.compute.provider_tree [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 562.253286] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.253286] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.341961] env[61852]: DEBUG oslo_vmware.api [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Task: {'id': task-1292675, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.423108] env[61852]: DEBUG nova.scheduler.client.report [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 562.834866] env[61852]: DEBUG oslo_vmware.api [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Task: {'id': task-1292675, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.537644} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 562.835157] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] d75e131b-1933-4e1f-bcf1-62ed83779177/d75e131b-1933-4e1f-bcf1-62ed83779177.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 562.835344] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 562.835588] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0e52f021-41bd-4571-a95c-ceb35e28668a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.842608] env[61852]: DEBUG oslo_vmware.api [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Waiting for the task: (returnval){ [ 562.842608] env[61852]: value = "task-1292676" [ 562.842608] env[61852]: _type = "Task" [ 562.842608] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.843033] env[61852]: DEBUG nova.network.neutron [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 562.851355] env[61852]: DEBUG oslo_vmware.api [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Task: {'id': task-1292676, 'name': ExtendVirtualDisk_Task} progress is 0%. 
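The extend target of 1048576 in ExtendVirtualDisk_Task is the flavor's root_gb expressed in KB: the copied sparse cirros image is only about 21 MB, so the root disk is grown to m1.nano's 1 GiB. The arithmetic:

root_gb = 1                                # m1.nano root_gb, from the flavor above
requested_size_kb = root_gb * 1024 * 1024  # 1048576, the value in the log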
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.931290] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.601s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 562.932182] env[61852]: DEBUG nova.compute.manager [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 562.938989] env[61852]: DEBUG oslo_concurrency.lockutils [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.768s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.363297] env[61852]: DEBUG oslo_vmware.api [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Task: {'id': task-1292676, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078562} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 563.363591] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 563.364739] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26975d10-03ba-4aaa-b583-231d293c3a62 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.388941] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] d75e131b-1933-4e1f-bcf1-62ed83779177/d75e131b-1933-4e1f-bcf1-62ed83779177.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 563.392810] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e8595c9e-4930-439b-8e59-0e1e83d06b0a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.420185] env[61852]: DEBUG oslo_vmware.api [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Waiting for the task: (returnval){ [ 563.420185] env[61852]: value = "task-1292677" [ 563.420185] env[61852]: _type = "Task" [ 563.420185] env[61852]: } to complete. 
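The acquired/waited/held lines come from oslo.concurrency's named-lock decorator: "compute_resources" serializes the resource tracker within the process, which is why instance_claim earlier waited 22.418s behind _update_available_resource and abort_instance_claim above held the lock for 2.122s. Roughly the usage pattern (decorator arguments beyond the lock name elided; the body is not Nova's):

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim(instance):
    # Body elided. Entry/exit of the decorated function emits the
    # DEBUG "acquired ... waited Ns" / "released ... held Ns" pairs above.
    pass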
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.431850] env[61852]: DEBUG oslo_vmware.api [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Task: {'id': task-1292677, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.451055] env[61852]: DEBUG nova.compute.utils [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 563.459118] env[61852]: DEBUG nova.compute.manager [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 563.460683] env[61852]: DEBUG nova.network.neutron [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 563.675360] env[61852]: DEBUG nova.policy [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '110b17cafa7b4577bf39375eb2688666', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '47e2068ccd4b4b0d977b5657121df21b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 563.743126] env[61852]: DEBUG nova.network.neutron [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 563.933804] env[61852]: DEBUG oslo_vmware.api [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Task: {'id': task-1292677, 'name': ReconfigVM_Task, 'duration_secs': 0.279322} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 563.934135] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Reconfigured VM instance instance-00000007 to attach disk [datastore1] d75e131b-1933-4e1f-bcf1-62ed83779177/d75e131b-1933-4e1f-bcf1-62ed83779177.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 563.934872] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4ec8e1d9-3b24-4a5f-b038-42cafe7411e6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.945921] env[61852]: DEBUG oslo_vmware.api [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Waiting for the task: (returnval){ [ 563.945921] env[61852]: value = "task-1292678" [ 563.945921] env[61852]: _type = "Task" [ 563.945921] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.957509] env[61852]: DEBUG oslo_vmware.api [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Task: {'id': task-1292678, 'name': Rename_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.963491] env[61852]: DEBUG nova.compute.manager [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 563.967332] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a670a556-610d-454f-96da-c84a1eb9d303 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.976765] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f77f2845-5260-44aa-aab5-6e2381207ec2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.022087] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc43029-eb40-4f74-83b6-b8869c074dbc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.031224] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-515fa628-1808-47c7-8b0d-b0034246859b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.052375] env[61852]: DEBUG nova.compute.provider_tree [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 564.247612] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Releasing lock "refresh_cache-d75e131b-1933-4e1f-bcf1-62ed83779177" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 564.247612] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Updated the network info_cache for instance {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 564.247612] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 564.247612] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 564.247612] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 564.247945] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 564.247982] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61852) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 564.456419] env[61852]: DEBUG oslo_vmware.api [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Task: {'id': task-1292678, 'name': Rename_Task, 'duration_secs': 0.137439} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 564.456768] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 564.457057] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c512f1aa-d8c9-43e6-86b8-ec387dc2e074 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.463753] env[61852]: DEBUG oslo_vmware.api [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Waiting for the task: (returnval){ [ 564.463753] env[61852]: value = "task-1292679" [ 564.463753] env[61852]: _type = "Task" [ 564.463753] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 564.484216] env[61852]: DEBUG oslo_vmware.api [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Task: {'id': task-1292679, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.556063] env[61852]: DEBUG nova.scheduler.client.report [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 564.805896] env[61852]: DEBUG nova.network.neutron [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Successfully created port: 26aa5454-3290-42f7-909e-df1f87ecd38a {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 564.844470] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Acquiring lock "b566ea57-9b1a-4869-be7c-9ba579db25dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.844704] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Lock "b566ea57-9b1a-4869-be7c-9ba579db25dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.974433] env[61852]: DEBUG oslo_vmware.api [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Task: {'id': task-1292679, 'name': PowerOnVM_Task, 'duration_secs': 0.444197} completed successfully. 
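"Allocating IP information in the background" means port creation runs on a greenthread while the hypervisor-side build continues; the instance's network_info is a wrapper that only blocks, and surfaces failures such as the PortBindingFailed traceback below, when it is first iterated. The shape of that pattern, simplified with stand-in functions rather than Nova's API:

import eventlet

def allocate_network(instance_uuid):
    # Stand-in for the neutron allocate_for_instance() call; in the failing
    # build below this is where the PortBindingFailed originates.
    return [{'port_id': '26aa5454-3290-42f7-909e-df1f87ecd38a'}]

def build_and_run(instance_uuid):
    gt = eventlet.spawn(allocate_network, instance_uuid)  # background allocation
    # ... disk and VM preparation proceed concurrently here ...
    network_info = gt.wait()   # first use blocks and re-raises any failure
    return network_info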
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 564.974716] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 564.974916] env[61852]: DEBUG nova.compute.manager [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 564.976083] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ddefaa0-f8fc-41a8-a310-0aeaa5b8c55d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.983282] env[61852]: DEBUG nova.compute.manager [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 565.012356] env[61852]: DEBUG nova.virt.hardware [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 565.013258] env[61852]: DEBUG nova.virt.hardware [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 565.013258] env[61852]: DEBUG nova.virt.hardware [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 565.013514] env[61852]: DEBUG nova.virt.hardware [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 565.013706] env[61852]: DEBUG nova.virt.hardware [None 
req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 565.015429] env[61852]: DEBUG nova.virt.hardware [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 565.015429] env[61852]: DEBUG nova.virt.hardware [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 565.015429] env[61852]: DEBUG nova.virt.hardware [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 565.015429] env[61852]: DEBUG nova.virt.hardware [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 565.015429] env[61852]: DEBUG nova.virt.hardware [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 565.015793] env[61852]: DEBUG nova.virt.hardware [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 565.016191] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb4f3628-0041-42d9-a1aa-85f93991160c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.026480] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d5c7226-5851-4046-a386-b7fa38a3f6cb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.061319] env[61852]: DEBUG oslo_concurrency.lockutils [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.122s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 565.064829] env[61852]: ERROR nova.compute.manager [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 
tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port de358c72-6a90-452b-8bba-7f78a88b4b17, please check neutron logs for more information. [ 565.064829] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Traceback (most recent call last): [ 565.064829] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 565.064829] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] self.driver.spawn(context, instance, image_meta, [ 565.064829] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 565.064829] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 565.064829] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 565.064829] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] vm_ref = self.build_virtual_machine(instance, [ 565.064829] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 565.064829] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] vif_infos = vmwarevif.get_vif_info(self._session, [ 565.064829] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 565.065391] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] for vif in network_info: [ 565.065391] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 565.065391] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] return self._sync_wrapper(fn, *args, **kwargs) [ 565.065391] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 565.065391] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] self.wait() [ 565.065391] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 565.065391] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] self[:] = self._gt.wait() [ 565.065391] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 565.065391] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] return self._exit_event.wait() [ 565.065391] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 
565.065391] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] result = hub.switch() [ 565.065391] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 565.065391] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] return self.greenlet.switch() [ 565.066496] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 565.066496] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] result = function(*args, **kwargs) [ 565.066496] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 565.066496] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] return func(*args, **kwargs) [ 565.066496] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 565.066496] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] raise e [ 565.066496] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 565.066496] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] nwinfo = self.network_api.allocate_for_instance( [ 565.066496] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 565.066496] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] created_port_ids = self._update_ports_for_instance( [ 565.066496] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 565.066496] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] with excutils.save_and_reraise_exception(): [ 565.066496] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 565.066869] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] self.force_reraise() [ 565.066869] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 565.066869] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] raise self.value [ 565.066869] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 565.066869] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] updated_port = self._update_port( [ 565.066869] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in 
_update_port [ 565.066869] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] _ensure_no_port_binding_failure(port) [ 565.066869] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 565.066869] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] raise exception.PortBindingFailed(port_id=port['id']) [ 565.066869] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] nova.exception.PortBindingFailed: Binding failed for port de358c72-6a90-452b-8bba-7f78a88b4b17, please check neutron logs for more information. [ 565.066869] env[61852]: ERROR nova.compute.manager [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] [ 565.067550] env[61852]: DEBUG nova.compute.utils [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Binding failed for port de358c72-6a90-452b-8bba-7f78a88b4b17, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 565.067550] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.288s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 565.069290] env[61852]: INFO nova.compute.claims [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 565.072472] env[61852]: DEBUG nova.compute.manager [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Build of instance 8084d5e2-454b-4003-a9e8-b733fd0322a3 was re-scheduled: Binding failed for port de358c72-6a90-452b-8bba-7f78a88b4b17, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 565.074850] env[61852]: DEBUG nova.compute.manager [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 565.074850] env[61852]: DEBUG oslo_concurrency.lockutils [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Acquiring lock "refresh_cache-8084d5e2-454b-4003-a9e8-b733fd0322a3" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 565.074850] env[61852]: DEBUG oslo_concurrency.lockutils [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Acquired lock "refresh_cache-8084d5e2-454b-4003-a9e8-b733fd0322a3" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 565.074850] env[61852]: DEBUG nova.network.neutron [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 565.503169] env[61852]: DEBUG oslo_concurrency.lockutils [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.613550] env[61852]: DEBUG nova.network.neutron [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 565.749028] env[61852]: DEBUG nova.network.neutron [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 566.258822] env[61852]: DEBUG oslo_concurrency.lockutils [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Releasing lock "refresh_cache-8084d5e2-454b-4003-a9e8-b733fd0322a3" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 566.258822] env[61852]: DEBUG nova.compute.manager [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 566.258822] env[61852]: DEBUG nova.compute.manager [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 566.258822] env[61852]: DEBUG nova.network.neutron [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 566.294219] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Acquiring lock "144d5486-d438-4bca-9b68-c414cc1f4659" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.294473] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Lock "144d5486-d438-4bca-9b68-c414cc1f4659" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.297772] env[61852]: DEBUG nova.network.neutron [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 566.304567] env[61852]: DEBUG oslo_concurrency.lockutils [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Acquiring lock "0b213475-347e-42c9-aa16-0abd570d1a3e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.304809] env[61852]: DEBUG oslo_concurrency.lockutils [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Lock "0b213475-347e-42c9-aa16-0abd570d1a3e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.496521] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c449ff33-e59c-4b03-a096-85178e35c2c8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.506164] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad5a5df6-9f82-480c-aba6-ae0cf23b28a0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.534465] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61fe53da-5540-44b3-8130-4cb7ea0daf80 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.545158] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb0548e3-58c6-4cab-b1aa-040b4d6a6182 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.556189] env[61852]: DEBUG nova.compute.provider_tree [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 566.803579] env[61852]: DEBUG nova.network.neutron [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 566.948013] env[61852]: DEBUG nova.compute.manager [req-27084c1e-dec9-40f9-b5b9-8ca5ab154ead req-d1615778-e385-4f92-ae98-5eac62de331b service nova] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Received event network-changed-26aa5454-3290-42f7-909e-df1f87ecd38a {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 566.948470] env[61852]: DEBUG nova.compute.manager [req-27084c1e-dec9-40f9-b5b9-8ca5ab154ead req-d1615778-e385-4f92-ae98-5eac62de331b service nova] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Refreshing instance network info cache due to event network-changed-26aa5454-3290-42f7-909e-df1f87ecd38a. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 566.948772] env[61852]: DEBUG oslo_concurrency.lockutils [req-27084c1e-dec9-40f9-b5b9-8ca5ab154ead req-d1615778-e385-4f92-ae98-5eac62de331b service nova] Acquiring lock "refresh_cache-a4736ffe-ad02-444d-bb6e-2cf4f70d64ee" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 566.948945] env[61852]: DEBUG oslo_concurrency.lockutils [req-27084c1e-dec9-40f9-b5b9-8ca5ab154ead req-d1615778-e385-4f92-ae98-5eac62de331b service nova] Acquired lock "refresh_cache-a4736ffe-ad02-444d-bb6e-2cf4f70d64ee" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 566.949120] env[61852]: DEBUG nova.network.neutron [req-27084c1e-dec9-40f9-b5b9-8ca5ab154ead req-d1615778-e385-4f92-ae98-5eac62de331b service nova] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Refreshing network info cache for port 26aa5454-3290-42f7-909e-df1f87ecd38a {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 567.059702] env[61852]: DEBUG nova.scheduler.client.report [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 567.256029] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Acquiring lock "d75e131b-1933-4e1f-bcf1-62ed83779177" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.256500] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Lock "d75e131b-1933-4e1f-bcf1-62ed83779177" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.256751] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Acquiring lock "d75e131b-1933-4e1f-bcf1-62ed83779177-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.256968] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Lock "d75e131b-1933-4e1f-bcf1-62ed83779177-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.257370] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Lock "d75e131b-1933-4e1f-bcf1-62ed83779177-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 567.260093] env[61852]: INFO nova.compute.manager [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Terminating instance [ 567.262777] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Acquiring lock "refresh_cache-d75e131b-1933-4e1f-bcf1-62ed83779177" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 567.262777] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Acquired lock "refresh_cache-d75e131b-1933-4e1f-bcf1-62ed83779177" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 567.262966] env[61852]: DEBUG nova.network.neutron [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 567.312133] env[61852]: INFO nova.compute.manager [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] Took 1.05 seconds to deallocate network for instance. [ 567.366947] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Acquiring lock "f9e90a57-da19-4b1a-81cb-8a6433e09785" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.371393] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Lock "f9e90a57-da19-4b1a-81cb-8a6433e09785" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.004s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.493854] env[61852]: DEBUG nova.network.neutron [req-27084c1e-dec9-40f9-b5b9-8ca5ab154ead req-d1615778-e385-4f92-ae98-5eac62de331b service nova] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 567.565497] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.498s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 567.565497] env[61852]: DEBUG nova.compute.manager [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 567.569347] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 22.556s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.577551] env[61852]: ERROR nova.compute.manager [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 26aa5454-3290-42f7-909e-df1f87ecd38a, please check neutron logs for more information. [ 567.577551] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 567.577551] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 567.577551] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 567.577551] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 567.577551] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 567.577551] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 567.577551] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 567.577551] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 567.577551] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 567.577551] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 567.577551] env[61852]: ERROR nova.compute.manager raise self.value [ 567.577551] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 567.577551] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 567.577551] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 567.577551] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 567.581570] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 567.581570] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 567.581570] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 26aa5454-3290-42f7-909e-df1f87ecd38a, please check neutron logs for more information. [ 567.581570] env[61852]: ERROR nova.compute.manager [ 567.581570] env[61852]: Traceback (most recent call last): [ 567.581570] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 567.581570] env[61852]: listener.cb(fileno) [ 567.581570] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 567.581570] env[61852]: result = function(*args, **kwargs) [ 567.581570] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 567.581570] env[61852]: return func(*args, **kwargs) [ 567.581570] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 567.581570] env[61852]: raise e [ 567.581570] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 567.581570] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 567.581570] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 567.581570] env[61852]: created_port_ids = self._update_ports_for_instance( [ 567.581570] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 567.581570] env[61852]: with excutils.save_and_reraise_exception(): [ 567.581570] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 567.581570] env[61852]: self.force_reraise() [ 567.581570] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 567.581570] env[61852]: raise self.value [ 567.581570] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 567.581570] env[61852]: updated_port = self._update_port( [ 567.581570] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 567.581570] env[61852]: _ensure_no_port_binding_failure(port) [ 567.581570] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 567.581570] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 567.582682] env[61852]: nova.exception.PortBindingFailed: Binding failed for port 26aa5454-3290-42f7-909e-df1f87ecd38a, please check neutron logs for more information. [ 567.582682] env[61852]: Removing descriptor: 16 [ 567.582682] env[61852]: ERROR nova.compute.manager [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 26aa5454-3290-42f7-909e-df1f87ecd38a, please check neutron logs for more information. 
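The PortBindingFailed tracebacks above (for ports de358c72-6a90-452b-8bba-7f78a88b4b17 and 26aa5454-3290-42f7-909e-df1f87ecd38a) all terminate in the same guard at nova/network/neutron.py line 294: after Neutron creates or updates a port, Nova inspects the returned binding:vif_type and aborts the build if binding failed. A minimal sketch of that check follows; the exception class is a stand-in for nova.exception.PortBindingFailed, not Nova's actual definition:

```python
# Simplified sketch of the guard at nova/network/neutron.py:294 in the
# tracebacks above; the exception class here is a stand-in for
# nova.exception.PortBindingFailed.

VIF_TYPE_BINDING_FAILED = 'binding_failed'  # vif_type Neutron reports when no mechanism driver could bind the port


class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""

    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")


def _ensure_no_port_binding_failure(port):
    # 'port' is the dict Neutron returns from a port create/update call.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])
```

The `with excutils.save_and_reraise_exception():` frames in the same tracebacks are oslo.utils' idiom for logging and then re-raising the original exception after cleanup, which is why the identical PortBindingFailed propagates unchanged from _update_port up through allocate_for_instance into _build_and_run_instance, where the build is re-scheduled.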
[ 567.582682] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Traceback (most recent call last): [ 567.582682] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 567.582682] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] yield resources [ 567.582682] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 567.582682] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] self.driver.spawn(context, instance, image_meta, [ 567.582682] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 567.582682] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] self._vmops.spawn(context, instance, image_meta, injected_files, [ 567.582682] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 567.582682] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] vm_ref = self.build_virtual_machine(instance, [ 567.583134] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 567.583134] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] vif_infos = vmwarevif.get_vif_info(self._session, [ 567.583134] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 567.583134] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] for vif in network_info: [ 567.583134] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 567.583134] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] return self._sync_wrapper(fn, *args, **kwargs) [ 567.583134] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 567.583134] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] self.wait() [ 567.583134] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 567.583134] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] self[:] = self._gt.wait() [ 567.583134] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 567.583134] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] return self._exit_event.wait() [ 567.583134] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 567.583530] env[61852]: ERROR 
nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] result = hub.switch() [ 567.583530] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 567.583530] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] return self.greenlet.switch() [ 567.583530] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 567.583530] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] result = function(*args, **kwargs) [ 567.583530] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 567.583530] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] return func(*args, **kwargs) [ 567.583530] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 567.583530] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] raise e [ 567.583530] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 567.583530] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] nwinfo = self.network_api.allocate_for_instance( [ 567.583530] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 567.583530] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] created_port_ids = self._update_ports_for_instance( [ 567.583905] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 567.583905] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] with excutils.save_and_reraise_exception(): [ 567.583905] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 567.583905] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] self.force_reraise() [ 567.583905] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 567.583905] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] raise self.value [ 567.583905] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 567.583905] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] updated_port = self._update_port( [ 567.583905] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 567.583905] 
env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] _ensure_no_port_binding_failure(port) [ 567.583905] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 567.583905] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] raise exception.PortBindingFailed(port_id=port['id']) [ 567.585166] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] nova.exception.PortBindingFailed: Binding failed for port 26aa5454-3290-42f7-909e-df1f87ecd38a, please check neutron logs for more information. [ 567.585166] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] [ 567.585166] env[61852]: INFO nova.compute.manager [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Terminating instance [ 567.585166] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Acquiring lock "refresh_cache-a4736ffe-ad02-444d-bb6e-2cf4f70d64ee" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 567.723607] env[61852]: DEBUG nova.network.neutron [req-27084c1e-dec9-40f9-b5b9-8ca5ab154ead req-d1615778-e385-4f92-ae98-5eac62de331b service nova] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 567.794473] env[61852]: DEBUG nova.network.neutron [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 567.907254] env[61852]: DEBUG nova.network.neutron [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.085278] env[61852]: DEBUG nova.compute.utils [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 568.085371] env[61852]: DEBUG nova.compute.manager [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 568.085484] env[61852]: DEBUG nova.network.neutron [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 568.227503] env[61852]: DEBUG oslo_concurrency.lockutils [req-27084c1e-dec9-40f9-b5b9-8ca5ab154ead req-d1615778-e385-4f92-ae98-5eac62de331b service nova] Releasing lock "refresh_cache-a4736ffe-ad02-444d-bb6e-2cf4f70d64ee" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 568.227503] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Acquired lock "refresh_cache-a4736ffe-ad02-444d-bb6e-2cf4f70d64ee" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.227503] env[61852]: DEBUG nova.network.neutron [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 568.266199] env[61852]: DEBUG nova.policy [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7fbf75982c374bc688512dedeea64637', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e8a316d2b5fb4bb2b169c26f33b5c8d0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 568.349418] env[61852]: INFO nova.scheduler.client.report [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Deleted allocations for instance 8084d5e2-454b-4003-a9e8-b733fd0322a3 [ 568.412739] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Releasing lock "refresh_cache-d75e131b-1933-4e1f-bcf1-62ed83779177" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 568.413185] env[61852]: DEBUG nova.compute.manager [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 568.413376] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 568.414266] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8591c567-154d-432f-9d70-efdc973d079d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.422154] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 568.424691] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4269eaa4-99b2-4b87-85cd-6b5c4f0703ad {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.433395] env[61852]: DEBUG oslo_vmware.api [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Waiting for the task: (returnval){ [ 568.433395] env[61852]: value = "task-1292680" [ 568.433395] env[61852]: _type = "Task" [ 568.433395] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 568.443430] env[61852]: DEBUG oslo_vmware.api [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292680, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.550901] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a31f0f82-4099-448a-a280-4ae9220c1877 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.562996] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95adfe8c-d472-4916-9ee0-cdf95c51114f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.593710] env[61852]: DEBUG nova.compute.manager [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 568.603953] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5477328a-cda4-47bc-90a9-83832d958987 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.614608] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd79df1-614f-44aa-8639-82878ccdaa45 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.633184] env[61852]: DEBUG nova.compute.provider_tree [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 568.764122] env[61852]: DEBUG nova.network.neutron [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 568.860386] env[61852]: DEBUG oslo_concurrency.lockutils [None req-02f37103-fa92-4e4a-a1b8-e3cf478d0336 tempest-ServerDiagnosticsTest-496884514 tempest-ServerDiagnosticsTest-496884514-project-member] Lock "8084d5e2-454b-4003-a9e8-b733fd0322a3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.743s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 568.866250] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "8084d5e2-454b-4003-a9e8-b733fd0322a3" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 39.631s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.866734] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-61e78209-9306-423d-b06d-e3434252199c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.876036] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d824cab9-2bfe-4858-8a50-7f54212a3e89 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.952722] env[61852]: DEBUG oslo_vmware.api [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292680, 'name': PowerOffVM_Task, 'duration_secs': 0.121962} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 568.953012] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 568.953188] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 568.953432] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-806553f3-b1b7-4931-b815-8ebede60d766 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.978818] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 568.980584] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 568.980584] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Deleting the datastore file [datastore1] d75e131b-1933-4e1f-bcf1-62ed83779177 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 568.980584] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5e17db44-741f-472c-9d79-b972775f4c81 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.986375] env[61852]: DEBUG oslo_vmware.api [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Waiting for the task: (returnval){ [ 568.986375] env[61852]: value = "task-1292682" [ 568.986375] env[61852]: _type = "Task" [ 568.986375] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 568.992239] env[61852]: DEBUG nova.network.neutron [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.995074] env[61852]: DEBUG nova.compute.manager [req-10a7ec05-a0e5-4992-a071-59df51a261b4 req-5451d6ab-68b8-4731-acc2-d2502fa7f02b service nova] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Received event network-vif-deleted-26aa5454-3290-42f7-909e-df1f87ecd38a {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 569.000702] env[61852]: DEBUG oslo_vmware.api [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292682, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.081157] env[61852]: DEBUG nova.network.neutron [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Successfully created port: 2ad5fcff-857c-46b7-a1e1-23ee0f089406 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 569.136032] env[61852]: DEBUG nova.scheduler.client.report [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 569.367726] env[61852]: DEBUG nova.compute.manager [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 569.406269] env[61852]: INFO nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 8084d5e2-454b-4003-a9e8-b733fd0322a3] During the sync_power process the instance has moved from host None to host cpu-1 [ 569.406904] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "8084d5e2-454b-4003-a9e8-b733fd0322a3" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.540s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 569.497397] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Releasing lock "refresh_cache-a4736ffe-ad02-444d-bb6e-2cf4f70d64ee" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 569.498085] env[61852]: DEBUG nova.compute.manager [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 569.498085] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 569.498338] env[61852]: DEBUG oslo_vmware.api [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Task: {'id': task-1292682, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.102863} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 569.498535] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-be3d733c-c64f-430f-8dcf-2e636b052fd9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.500702] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 569.500885] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 569.501118] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 569.501359] env[61852]: INFO nova.compute.manager [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Took 1.09 seconds to destroy the instance on the hypervisor. [ 569.501593] env[61852]: DEBUG oslo.service.loopingcall [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 569.501776] env[61852]: DEBUG nova.compute.manager [-] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 569.502076] env[61852]: DEBUG nova.network.neutron [-] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 569.511105] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad3a97d4-9c75-472c-9e45-28ebf48793cd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.532947] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a4736ffe-ad02-444d-bb6e-2cf4f70d64ee could not be found. 
[ 569.533227] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 569.533304] env[61852]: INFO nova.compute.manager [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Took 0.04 seconds to destroy the instance on the hypervisor. [ 569.533492] env[61852]: DEBUG oslo.service.loopingcall [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 569.533697] env[61852]: DEBUG nova.compute.manager [-] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 569.533856] env[61852]: DEBUG nova.network.neutron [-] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 569.536308] env[61852]: DEBUG nova.network.neutron [-] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 569.587905] env[61852]: DEBUG nova.network.neutron [-] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 569.614634] env[61852]: DEBUG nova.compute.manager [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 569.642787] env[61852]: DEBUG nova.virt.hardware [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 569.643148] env[61852]: DEBUG nova.virt.hardware [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 569.643220] env[61852]: DEBUG nova.virt.hardware [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 569.643359] env[61852]: DEBUG nova.virt.hardware [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 569.643524] env[61852]: DEBUG nova.virt.hardware [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 569.643683] env[61852]: DEBUG nova.virt.hardware [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 569.643922] env[61852]: DEBUG nova.virt.hardware [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 569.644057] env[61852]: DEBUG nova.virt.hardware [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 569.644239] env[61852]: DEBUG nova.virt.hardware [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] 
Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 569.644414] env[61852]: DEBUG nova.virt.hardware [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 569.644609] env[61852]: DEBUG nova.virt.hardware [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 569.645327] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.077s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 569.645989] env[61852]: ERROR nova.compute.manager [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9fd5ccc7-d886-4008-8822-f7e5ae8d217f, please check neutron logs for more information. [ 569.645989] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Traceback (most recent call last): [ 569.645989] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 569.645989] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] self.driver.spawn(context, instance, image_meta, [ 569.645989] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 569.645989] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] self._vmops.spawn(context, instance, image_meta, injected_files, [ 569.645989] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 569.645989] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] vm_ref = self.build_virtual_machine(instance, [ 569.645989] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 569.645989] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] vif_infos = vmwarevif.get_vif_info(self._session, [ 569.645989] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 569.646372] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] for vif in network_info: [ 569.646372] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] 
File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 569.646372] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] return self._sync_wrapper(fn, *args, **kwargs) [ 569.646372] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 569.646372] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] self.wait() [ 569.646372] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 569.646372] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] self[:] = self._gt.wait() [ 569.646372] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 569.646372] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] return self._exit_event.wait() [ 569.646372] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 569.646372] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] result = hub.switch() [ 569.646372] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 569.646372] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] return self.greenlet.switch() [ 569.646790] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 569.646790] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] result = function(*args, **kwargs) [ 569.646790] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 569.646790] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] return func(*args, **kwargs) [ 569.646790] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 569.646790] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] raise e [ 569.646790] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 569.646790] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] nwinfo = self.network_api.allocate_for_instance( [ 569.646790] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 569.646790] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] created_port_ids = self._update_ports_for_instance( [ 569.646790] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/network/neutron.py", 
line 1414, in _update_ports_for_instance [ 569.646790] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] with excutils.save_and_reraise_exception(): [ 569.646790] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 569.647198] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] self.force_reraise() [ 569.647198] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 569.647198] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] raise self.value [ 569.647198] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 569.647198] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] updated_port = self._update_port( [ 569.647198] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 569.647198] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] _ensure_no_port_binding_failure(port) [ 569.647198] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 569.647198] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] raise exception.PortBindingFailed(port_id=port['id']) [ 569.647198] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] nova.exception.PortBindingFailed: Binding failed for port 9fd5ccc7-d886-4008-8822-f7e5ae8d217f, please check neutron logs for more information. [ 569.647198] env[61852]: ERROR nova.compute.manager [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] [ 569.647507] env[61852]: DEBUG nova.compute.utils [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Binding failed for port 9fd5ccc7-d886-4008-8822-f7e5ae8d217f, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 569.648335] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ffd21f1-bbe6-42e0-969f-355ed3e80e4c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.651166] env[61852]: DEBUG nova.compute.manager [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Build of instance f112b2be-fbd7-4a01-b369-25fe490e4204 was re-scheduled: Binding failed for port 9fd5ccc7-d886-4008-8822-f7e5ae8d217f, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 569.651734] env[61852]: DEBUG nova.compute.manager [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 569.651830] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Acquiring lock "refresh_cache-f112b2be-fbd7-4a01-b369-25fe490e4204" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.651968] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Acquired lock "refresh_cache-f112b2be-fbd7-4a01-b369-25fe490e4204" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.652184] env[61852]: DEBUG nova.network.neutron [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 569.653506] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.390s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.655485] env[61852]: INFO nova.compute.claims [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 569.666969] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3735d49-e5cc-4fda-9e3c-b05c5aeb5d85 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.914980] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.040022] env[61852]: DEBUG nova.network.neutron [-] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 570.091045] env[61852]: DEBUG nova.network.neutron [-] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
570.190107] env[61852]: DEBUG nova.network.neutron [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 570.346981] env[61852]: DEBUG nova.network.neutron [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 570.533043] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Acquiring lock "593106da-0c81-448a-b3ba-fd6007dcdd98" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.533713] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Lock "593106da-0c81-448a-b3ba-fd6007dcdd98" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.542735] env[61852]: INFO nova.compute.manager [-] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Took 1.04 seconds to deallocate network for instance. [ 570.594432] env[61852]: INFO nova.compute.manager [-] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Took 1.06 seconds to deallocate network for instance. [ 570.601304] env[61852]: DEBUG nova.compute.claims [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 570.601569] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.788387] env[61852]: ERROR nova.compute.manager [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2ad5fcff-857c-46b7-a1e1-23ee0f089406, please check neutron logs for more information. 
[ 570.788387] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 570.788387] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 570.788387] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 570.788387] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 570.788387] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 570.788387] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 570.788387] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 570.788387] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 570.788387] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 570.788387] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 570.788387] env[61852]: ERROR nova.compute.manager raise self.value [ 570.788387] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 570.788387] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 570.788387] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 570.788387] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 570.788976] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 570.788976] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 570.788976] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2ad5fcff-857c-46b7-a1e1-23ee0f089406, please check neutron logs for more information. 
[ 570.788976] env[61852]: ERROR nova.compute.manager [ 570.788976] env[61852]: Traceback (most recent call last): [ 570.788976] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 570.788976] env[61852]: listener.cb(fileno) [ 570.788976] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 570.788976] env[61852]: result = function(*args, **kwargs) [ 570.788976] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 570.788976] env[61852]: return func(*args, **kwargs) [ 570.788976] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 570.788976] env[61852]: raise e [ 570.788976] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 570.788976] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 570.788976] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 570.788976] env[61852]: created_port_ids = self._update_ports_for_instance( [ 570.788976] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 570.788976] env[61852]: with excutils.save_and_reraise_exception(): [ 570.788976] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 570.788976] env[61852]: self.force_reraise() [ 570.788976] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 570.788976] env[61852]: raise self.value [ 570.788976] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 570.788976] env[61852]: updated_port = self._update_port( [ 570.788976] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 570.788976] env[61852]: _ensure_no_port_binding_failure(port) [ 570.788976] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 570.788976] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 570.789896] env[61852]: nova.exception.PortBindingFailed: Binding failed for port 2ad5fcff-857c-46b7-a1e1-23ee0f089406, please check neutron logs for more information. [ 570.789896] env[61852]: Removing descriptor: 19 [ 570.789896] env[61852]: ERROR nova.compute.manager [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2ad5fcff-857c-46b7-a1e1-23ee0f089406, please check neutron logs for more information. 
[ 570.789896] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Traceback (most recent call last): [ 570.789896] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 570.789896] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] yield resources [ 570.789896] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 570.789896] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] self.driver.spawn(context, instance, image_meta, [ 570.789896] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 570.789896] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 570.789896] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 570.789896] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] vm_ref = self.build_virtual_machine(instance, [ 570.790372] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 570.790372] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] vif_infos = vmwarevif.get_vif_info(self._session, [ 570.790372] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 570.790372] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] for vif in network_info: [ 570.790372] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 570.790372] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] return self._sync_wrapper(fn, *args, **kwargs) [ 570.790372] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 570.790372] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] self.wait() [ 570.790372] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 570.790372] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] self[:] = self._gt.wait() [ 570.790372] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 570.790372] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] return self._exit_event.wait() [ 570.790372] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 570.790788] env[61852]: ERROR 
nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] result = hub.switch() [ 570.790788] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 570.790788] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] return self.greenlet.switch() [ 570.790788] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 570.790788] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] result = function(*args, **kwargs) [ 570.790788] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 570.790788] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] return func(*args, **kwargs) [ 570.790788] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 570.790788] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] raise e [ 570.790788] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 570.790788] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] nwinfo = self.network_api.allocate_for_instance( [ 570.790788] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 570.790788] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] created_port_ids = self._update_ports_for_instance( [ 570.791194] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 570.791194] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] with excutils.save_and_reraise_exception(): [ 570.791194] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 570.791194] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] self.force_reraise() [ 570.791194] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 570.791194] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] raise self.value [ 570.791194] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 570.791194] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] updated_port = self._update_port( [ 570.791194] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 570.791194] 
env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] _ensure_no_port_binding_failure(port) [ 570.791194] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 570.791194] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] raise exception.PortBindingFailed(port_id=port['id']) [ 570.791587] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] nova.exception.PortBindingFailed: Binding failed for port 2ad5fcff-857c-46b7-a1e1-23ee0f089406, please check neutron logs for more information. [ 570.791587] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] [ 570.791587] env[61852]: INFO nova.compute.manager [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Terminating instance [ 570.794853] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Acquiring lock "refresh_cache-d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 570.794853] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Acquired lock "refresh_cache-d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 570.794983] env[61852]: DEBUG nova.network.neutron [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 570.853852] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Releasing lock "refresh_cache-f112b2be-fbd7-4a01-b369-25fe490e4204" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 570.853852] env[61852]: DEBUG nova.compute.manager [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 570.853852] env[61852]: DEBUG nova.compute.manager [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 570.853852] env[61852]: DEBUG nova.network.neutron [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 570.875027] env[61852]: DEBUG nova.network.neutron [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 571.052609] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.139972] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fab795b-dad2-4c2f-8203-533c058e1fa2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.152112] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b80b8ea2-8f46-401b-aaea-b1ec5e1310b3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.191488] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24e4a5f9-dbfc-41fc-a235-452521bf9179 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.199688] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17814926-d942-4aa9-aaf6-6c5c65588c66 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.214820] env[61852]: DEBUG nova.compute.provider_tree [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 571.296218] env[61852]: DEBUG nova.compute.manager [req-b6649128-66f6-467a-91f8-c583c4d17189 req-1719986f-01df-4c82-8637-a664436ae2bc service nova] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Received event network-changed-2ad5fcff-857c-46b7-a1e1-23ee0f089406 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 571.296412] env[61852]: DEBUG nova.compute.manager [req-b6649128-66f6-467a-91f8-c583c4d17189 
req-1719986f-01df-4c82-8637-a664436ae2bc service nova] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Refreshing instance network info cache due to event network-changed-2ad5fcff-857c-46b7-a1e1-23ee0f089406. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 571.296602] env[61852]: DEBUG oslo_concurrency.lockutils [req-b6649128-66f6-467a-91f8-c583c4d17189 req-1719986f-01df-4c82-8637-a664436ae2bc service nova] Acquiring lock "refresh_cache-d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.327922] env[61852]: DEBUG nova.network.neutron [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 571.376756] env[61852]: DEBUG nova.network.neutron [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.417850] env[61852]: DEBUG nova.network.neutron [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.721681] env[61852]: DEBUG nova.scheduler.client.report [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 571.753189] env[61852]: DEBUG oslo_concurrency.lockutils [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Acquiring lock "48b40da3-1efc-4557-a791-e88158338aec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.753362] env[61852]: DEBUG oslo_concurrency.lockutils [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Lock "48b40da3-1efc-4557-a791-e88158338aec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.779403] env[61852]: DEBUG oslo_concurrency.lockutils [None 
req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Acquiring lock "29cb49fe-627a-4f0f-919b-58f764cd63d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.779714] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Lock "29cb49fe-627a-4f0f-919b-58f764cd63d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.879853] env[61852]: INFO nova.compute.manager [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: f112b2be-fbd7-4a01-b369-25fe490e4204] Took 1.03 seconds to deallocate network for instance. [ 571.923022] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Releasing lock "refresh_cache-d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 571.923022] env[61852]: DEBUG nova.compute.manager [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 571.923022] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 571.923022] env[61852]: DEBUG oslo_concurrency.lockutils [req-b6649128-66f6-467a-91f8-c583c4d17189 req-1719986f-01df-4c82-8637-a664436ae2bc service nova] Acquired lock "refresh_cache-d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.923022] env[61852]: DEBUG nova.network.neutron [req-b6649128-66f6-467a-91f8-c583c4d17189 req-1719986f-01df-4c82-8637-a664436ae2bc service nova] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Refreshing network info cache for port 2ad5fcff-857c-46b7-a1e1-23ee0f089406 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 571.923718] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ddfb96c0-66d5-470b-947f-4bc8b6edb103 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.931565] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c49f6c0-6bcc-442f-ba8d-4c2c86b6fae6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.955271] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6 could not be found. [ 571.955567] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 571.955794] env[61852]: INFO nova.compute.manager [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Took 0.03 seconds to destroy the instance on the hypervisor. [ 571.956119] env[61852]: DEBUG oslo.service.loopingcall [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 571.956399] env[61852]: DEBUG nova.compute.manager [-] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 571.956722] env[61852]: DEBUG nova.network.neutron [-] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 571.984263] env[61852]: DEBUG nova.network.neutron [-] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 572.226413] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.573s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 572.227179] env[61852]: DEBUG nova.compute.manager [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 572.229802] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 21.227s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.447081] env[61852]: DEBUG nova.network.neutron [req-b6649128-66f6-467a-91f8-c583c4d17189 req-1719986f-01df-4c82-8637-a664436ae2bc service nova] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 572.488344] env[61852]: DEBUG nova.network.neutron [-] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 572.600555] env[61852]: DEBUG nova.network.neutron [req-b6649128-66f6-467a-91f8-c583c4d17189 req-1719986f-01df-4c82-8637-a664436ae2bc service nova] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 572.742581] env[61852]: DEBUG nova.compute.utils [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 572.743926] env[61852]: DEBUG nova.compute.manager [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 572.744108] env[61852]: DEBUG nova.network.neutron [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 572.812654] env[61852]: DEBUG nova.policy [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd032b9d26a72416885d080dc2922e815', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '01d1223a327f4442b01edf22f3fc578a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 572.934147] env[61852]: INFO nova.scheduler.client.report [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Deleted allocations for instance f112b2be-fbd7-4a01-b369-25fe490e4204 [ 572.991503] env[61852]: INFO nova.compute.manager [-] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Took 1.03 seconds to deallocate network for instance. [ 572.997905] env[61852]: DEBUG nova.compute.claims [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 572.997905] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 573.104404] env[61852]: DEBUG oslo_concurrency.lockutils [req-b6649128-66f6-467a-91f8-c583c4d17189 req-1719986f-01df-4c82-8637-a664436ae2bc service nova] Releasing lock "refresh_cache-d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.104404] env[61852]: DEBUG nova.compute.manager [req-b6649128-66f6-467a-91f8-c583c4d17189 req-1719986f-01df-4c82-8637-a664436ae2bc service nova] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Received event network-vif-deleted-2ad5fcff-857c-46b7-a1e1-23ee0f089406 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 573.130682] env[61852]: DEBUG nova.network.neutron [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Successfully created port: 1175d90f-7ea1-4565-aa2c-fb93a0a3db16 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 573.207286] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f8607996-e30e-4219-80cc-0567c8452a92 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.214077] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcf75c4b-094c-4f61-a5c9-0a0ef614eda7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.247185] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99192746-5996-4aac-8448-acfed95aacc8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.250245] env[61852]: DEBUG nova.compute.manager [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 573.257336] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c24f8cb8-c5e9-489a-bc8b-6f7edfd8f0d7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.270720] env[61852]: DEBUG nova.compute.provider_tree [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 573.455555] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8e1ae0e5-04a6-44ea-a2a2-c296a4f7f8c8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Lock "f112b2be-fbd7-4a01-b369-25fe490e4204" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.561s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 573.774955] env[61852]: DEBUG nova.scheduler.client.report [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 573.874528] env[61852]: DEBUG nova.compute.manager [req-093d1ae6-463e-469a-b1a5-c131ef50d161 req-da5818cb-388b-481a-97d5-f8508d4f6c2b service nova] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Received event network-changed-1175d90f-7ea1-4565-aa2c-fb93a0a3db16 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 573.874528] env[61852]: DEBUG nova.compute.manager [req-093d1ae6-463e-469a-b1a5-c131ef50d161 req-da5818cb-388b-481a-97d5-f8508d4f6c2b service nova] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Refreshing instance network 
info cache due to event network-changed-1175d90f-7ea1-4565-aa2c-fb93a0a3db16. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 573.874528] env[61852]: DEBUG oslo_concurrency.lockutils [req-093d1ae6-463e-469a-b1a5-c131ef50d161 req-da5818cb-388b-481a-97d5-f8508d4f6c2b service nova] Acquiring lock "refresh_cache-57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.874528] env[61852]: DEBUG oslo_concurrency.lockutils [req-093d1ae6-463e-469a-b1a5-c131ef50d161 req-da5818cb-388b-481a-97d5-f8508d4f6c2b service nova] Acquired lock "refresh_cache-57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.874528] env[61852]: DEBUG nova.network.neutron [req-093d1ae6-463e-469a-b1a5-c131ef50d161 req-da5818cb-388b-481a-97d5-f8508d4f6c2b service nova] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Refreshing network info cache for port 1175d90f-7ea1-4565-aa2c-fb93a0a3db16 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 573.958588] env[61852]: DEBUG nova.compute.manager [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 574.133864] env[61852]: ERROR nova.compute.manager [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1175d90f-7ea1-4565-aa2c-fb93a0a3db16, please check neutron logs for more information. 
[ 574.133864] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 574.133864] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 574.133864] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 574.133864] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 574.133864] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 574.133864] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 574.133864] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 574.133864] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 574.133864] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 574.133864] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 574.133864] env[61852]: ERROR nova.compute.manager raise self.value [ 574.133864] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 574.133864] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 574.133864] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 574.133864] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 574.134353] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 574.134353] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 574.134353] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1175d90f-7ea1-4565-aa2c-fb93a0a3db16, please check neutron logs for more information. 
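The traceback above shows the port-update path re-raising a failed binding through oslo.utils' save_and_reraise_exception context manager (force_reraise / raise self.value) after _ensure_no_port_binding_failure detects the failure. The following is a minimal, self-contained sketch of that pattern, not Nova's actual code: PortBindingFailed here is a local stand-in class, save_and_reraise_exception is re-implemented with the stdlib to stay runnable without oslo.utils, and the 'binding:vif_type' / 'binding_failed' values mirror what Neutron reports for a port whose binding failed.

# Minimal sketch of the save-and-reraise pattern seen in the traceback above.
# Stand-ins only: PortBindingFailed is a local class, not nova.exception's.
import contextlib
import traceback


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs "
            "for more information." % port_id)


def _ensure_no_port_binding_failure(port):
    # Neutron marks a failed binding with binding:vif_type='binding_failed'.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port['id'])


@contextlib.contextmanager
def save_and_reraise_exception():
    # Emulates oslo_utils.excutils.save_and_reraise_exception: do the
    # logging/cleanup in the except path, then re-raise the original error.
    try:
        yield
    except Exception:
        traceback.print_exc()   # cleanup/logging happens here
        raise                   # equivalent to force_reraise()


def update_ports(ports):
    for port in ports:
        with save_and_reraise_exception():
            _ensure_no_port_binding_failure(port)


if __name__ == '__main__':
    try:
        update_ports([{'id': '1175d90f-7ea1-4565-aa2c-fb93a0a3db16',
                       'binding:vif_type': 'binding_failed'}])
    except PortBindingFailed as exc:
        print('caught:', exc)

The second, bare traceback that follows is the same exception surfacing in the eventlet greenthread that ran _allocate_network_async, which is why it unwinds through eventlet/hubs/poll.py and greenthread.py before reaching the same neutron.py frames.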
[ 574.134353] env[61852]: ERROR nova.compute.manager [ 574.134353] env[61852]: Traceback (most recent call last): [ 574.134353] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 574.134353] env[61852]: listener.cb(fileno) [ 574.134353] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 574.134353] env[61852]: result = function(*args, **kwargs) [ 574.134353] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 574.134353] env[61852]: return func(*args, **kwargs) [ 574.134353] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 574.134353] env[61852]: raise e [ 574.134353] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 574.134353] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 574.134353] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 574.134353] env[61852]: created_port_ids = self._update_ports_for_instance( [ 574.134353] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 574.134353] env[61852]: with excutils.save_and_reraise_exception(): [ 574.134353] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 574.134353] env[61852]: self.force_reraise() [ 574.134353] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 574.134353] env[61852]: raise self.value [ 574.134353] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 574.134353] env[61852]: updated_port = self._update_port( [ 574.134353] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 574.134353] env[61852]: _ensure_no_port_binding_failure(port) [ 574.134353] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 574.134353] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 574.135145] env[61852]: nova.exception.PortBindingFailed: Binding failed for port 1175d90f-7ea1-4565-aa2c-fb93a0a3db16, please check neutron logs for more information. [ 574.135145] env[61852]: Removing descriptor: 19 [ 574.259413] env[61852]: DEBUG nova.compute.manager [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 574.282124] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.051s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 574.282124] env[61852]: ERROR nova.compute.manager [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 0de7a94b-007f-4544-a3de-0dd7ab89898c, please check neutron logs for more information. [ 574.282124] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Traceback (most recent call last): [ 574.282124] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 574.282124] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] self.driver.spawn(context, instance, image_meta, [ 574.282124] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 574.282124] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 574.282124] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 574.282124] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] vm_ref = self.build_virtual_machine(instance, [ 574.282524] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 574.282524] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] vif_infos = vmwarevif.get_vif_info(self._session, [ 574.282524] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 574.282524] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] for vif in network_info: [ 574.282524] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 574.282524] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] return self._sync_wrapper(fn, *args, **kwargs) [ 574.282524] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 574.282524] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] self.wait() [ 574.282524] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 574.282524] env[61852]: ERROR nova.compute.manager 
[instance: eea17bb5-01e3-4144-a579-2a56be8154c4] self[:] = self._gt.wait() [ 574.282524] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 574.282524] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] return self._exit_event.wait() [ 574.282524] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 574.282877] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] result = hub.switch() [ 574.282877] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 574.282877] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] return self.greenlet.switch() [ 574.282877] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 574.282877] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] result = function(*args, **kwargs) [ 574.282877] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 574.282877] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] return func(*args, **kwargs) [ 574.282877] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 574.282877] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] raise e [ 574.282877] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 574.282877] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] nwinfo = self.network_api.allocate_for_instance( [ 574.282877] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 574.282877] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] created_port_ids = self._update_ports_for_instance( [ 574.283246] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 574.283246] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] with excutils.save_and_reraise_exception(): [ 574.283246] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 574.283246] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] self.force_reraise() [ 574.283246] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise 
[ 574.283246] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] raise self.value [ 574.283246] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 574.283246] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] updated_port = self._update_port( [ 574.283246] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 574.283246] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] _ensure_no_port_binding_failure(port) [ 574.283246] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 574.283246] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] raise exception.PortBindingFailed(port_id=port['id']) [ 574.283578] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] nova.exception.PortBindingFailed: Binding failed for port 0de7a94b-007f-4544-a3de-0dd7ab89898c, please check neutron logs for more information. [ 574.283578] env[61852]: ERROR nova.compute.manager [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] [ 574.283578] env[61852]: DEBUG nova.compute.utils [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Binding failed for port 0de7a94b-007f-4544-a3de-0dd7ab89898c, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 574.285473] env[61852]: DEBUG oslo_concurrency.lockutils [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.912s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.286960] env[61852]: INFO nova.compute.claims [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 574.289721] env[61852]: DEBUG nova.compute.manager [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Build of instance eea17bb5-01e3-4144-a579-2a56be8154c4 was re-scheduled: Binding failed for port 0de7a94b-007f-4544-a3de-0dd7ab89898c, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 574.290208] env[61852]: DEBUG nova.compute.manager [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 574.290545] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "refresh_cache-eea17bb5-01e3-4144-a579-2a56be8154c4" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.290657] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquired lock "refresh_cache-eea17bb5-01e3-4144-a579-2a56be8154c4" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.290843] env[61852]: DEBUG nova.network.neutron [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 574.297520] env[61852]: DEBUG nova.virt.hardware [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 574.297742] env[61852]: DEBUG nova.virt.hardware [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 574.297894] env[61852]: DEBUG nova.virt.hardware [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 574.298088] env[61852]: DEBUG nova.virt.hardware [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 574.298267] env[61852]: DEBUG nova.virt.hardware [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 574.298416] env[61852]: DEBUG nova.virt.hardware [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 574.298613] env[61852]: DEBUG nova.virt.hardware [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 574.298770] env[61852]: DEBUG nova.virt.hardware [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 574.298935] env[61852]: DEBUG nova.virt.hardware [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 574.299227] env[61852]: DEBUG nova.virt.hardware [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 574.299455] env[61852]: DEBUG nova.virt.hardware [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 574.300373] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a759b11b-f55e-4975-ac58-3a3319f61e4a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.309482] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c71bd4-cabc-4bd4-9fc4-959b4f8ed967 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.325918] env[61852]: ERROR nova.compute.manager [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1175d90f-7ea1-4565-aa2c-fb93a0a3db16, please check neutron logs for more information. 
[ 574.325918] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Traceback (most recent call last): [ 574.325918] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 574.325918] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] yield resources [ 574.325918] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 574.325918] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] self.driver.spawn(context, instance, image_meta, [ 574.325918] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 574.325918] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 574.325918] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 574.325918] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] vm_ref = self.build_virtual_machine(instance, [ 574.325918] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 574.326405] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] vif_infos = vmwarevif.get_vif_info(self._session, [ 574.326405] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 574.326405] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] for vif in network_info: [ 574.326405] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 574.326405] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] return self._sync_wrapper(fn, *args, **kwargs) [ 574.326405] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 574.326405] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] self.wait() [ 574.326405] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 574.326405] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] self[:] = self._gt.wait() [ 574.326405] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 574.326405] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] return self._exit_event.wait() [ 574.326405] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 574.326405] env[61852]: ERROR 
nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] current.throw(*self._exc) [ 574.326777] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 574.326777] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] result = function(*args, **kwargs) [ 574.326777] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 574.326777] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] return func(*args, **kwargs) [ 574.326777] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 574.326777] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] raise e [ 574.326777] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 574.326777] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] nwinfo = self.network_api.allocate_for_instance( [ 574.326777] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 574.326777] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] created_port_ids = self._update_ports_for_instance( [ 574.326777] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 574.326777] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] with excutils.save_and_reraise_exception(): [ 574.326777] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 574.327150] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] self.force_reraise() [ 574.327150] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 574.327150] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] raise self.value [ 574.327150] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 574.327150] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] updated_port = self._update_port( [ 574.327150] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 574.327150] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] _ensure_no_port_binding_failure(port) [ 574.327150] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
574.327150] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] raise exception.PortBindingFailed(port_id=port['id']) [ 574.327150] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] nova.exception.PortBindingFailed: Binding failed for port 1175d90f-7ea1-4565-aa2c-fb93a0a3db16, please check neutron logs for more information. [ 574.327150] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] [ 574.327150] env[61852]: INFO nova.compute.manager [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Terminating instance [ 574.328784] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Acquiring lock "refresh_cache-57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.393421] env[61852]: DEBUG nova.network.neutron [req-093d1ae6-463e-469a-b1a5-c131ef50d161 req-da5818cb-388b-481a-97d5-f8508d4f6c2b service nova] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 574.491232] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.491232] env[61852]: DEBUG nova.network.neutron [req-093d1ae6-463e-469a-b1a5-c131ef50d161 req-da5818cb-388b-481a-97d5-f8508d4f6c2b service nova] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.815762] env[61852]: DEBUG nova.network.neutron [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 574.904050] env[61852]: DEBUG nova.network.neutron [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.993459] env[61852]: DEBUG oslo_concurrency.lockutils [req-093d1ae6-463e-469a-b1a5-c131ef50d161 req-da5818cb-388b-481a-97d5-f8508d4f6c2b service nova] Releasing lock "refresh_cache-57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 574.993856] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Acquired lock "refresh_cache-57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.994062] env[61852]: DEBUG nova.network.neutron [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 575.410392] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Releasing lock "refresh_cache-eea17bb5-01e3-4144-a579-2a56be8154c4" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 575.410638] env[61852]: DEBUG nova.compute.manager [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 575.410819] env[61852]: DEBUG nova.compute.manager [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 575.410989] env[61852]: DEBUG nova.network.neutron [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 575.426598] env[61852]: DEBUG nova.network.neutron [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 575.521122] env[61852]: DEBUG nova.network.neutron [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 575.615865] env[61852]: DEBUG nova.network.neutron [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 575.760674] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f59575e-fa0b-44d5-8f9e-7b69c591307b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.768633] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6819ca40-ee8d-46bc-ad3b-ae65440f0da7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.804708] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a1223f1-82c7-491f-b666-1fc9bcef83c6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.812455] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab379f2e-deb5-483b-99e9-9d86800f7965 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.825902] env[61852]: DEBUG nova.compute.provider_tree [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 575.929596] env[61852]: DEBUG nova.network.neutron [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.015696] env[61852]: DEBUG nova.compute.manager [req-32506908-9a73-4a56-ab74-5118b3611be0 req-31da0e49-7c6f-4381-9550-c2b6b9f33ccf service nova] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Received event network-vif-deleted-1175d90f-7ea1-4565-aa2c-fb93a0a3db16 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 576.121968] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Releasing lock "refresh_cache-57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 576.122469] env[61852]: DEBUG nova.compute.manager [None 
req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 576.122665] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 576.122961] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dc8cab5d-8793-4bdb-b36a-47d25376f0fc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.131643] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60979cbd-5298-455f-85a4-0b1c4bcc3261 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.151732] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a could not be found. [ 576.151917] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 576.152082] env[61852]: INFO nova.compute.manager [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Took 0.03 seconds to destroy the instance on the hypervisor. [ 576.152410] env[61852]: DEBUG oslo.service.loopingcall [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 576.152571] env[61852]: DEBUG nova.compute.manager [-] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 576.152639] env[61852]: DEBUG nova.network.neutron [-] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 576.173245] env[61852]: DEBUG nova.network.neutron [-] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 576.335244] env[61852]: DEBUG nova.scheduler.client.report [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 576.435017] env[61852]: INFO nova.compute.manager [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: eea17bb5-01e3-4144-a579-2a56be8154c4] Took 1.02 seconds to deallocate network for instance. [ 576.675650] env[61852]: DEBUG nova.network.neutron [-] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.843269] env[61852]: DEBUG oslo_concurrency.lockutils [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.558s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 576.843943] env[61852]: DEBUG nova.compute.manager [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 576.846639] env[61852]: DEBUG oslo_concurrency.lockutils [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 24.874s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 577.178945] env[61852]: INFO nova.compute.manager [-] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Took 1.03 seconds to deallocate network for instance. 
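The lockutils lines that run through this section ('Acquiring lock ... by ...', 'acquired ... :: waited N.NNNs', '"released" ... :: held N.NNNs', e.g. the 24.874s wait and 2.558s hold on "compute_resources" just above) come from oslo.concurrency's synchronized wrapper. Below is a rough stdlib-only sketch of that wait/held accounting; the names and the plain threading.Lock are illustrative assumptions, and the real oslo_concurrency.lockutils adds fair locking, external file locks, and semaphore management on top.

# Rough stdlib sketch of the "waited/held" accounting behind the
# oslo_concurrency.lockutils log lines in this section. Illustrative only.
import functools
import threading
import time

_locks = {}
_locks_guard = threading.Lock()


def synchronized(name):
    def decorator(fn):
        @functools.wraps(fn)
        def inner(*args, **kwargs):
            with _locks_guard:
                lock = _locks.setdefault(name, threading.Lock())
            target = '%s.%s' % (fn.__module__, fn.__qualname__)
            print('Acquiring lock "%s" by "%s"' % (name, target))
            t0 = time.monotonic()
            lock.acquire()
            t1 = time.monotonic()
            print('Lock "%s" acquired by "%s" :: waited %.3fs'
                  % (name, target, t1 - t0))
            try:
                return fn(*args, **kwargs)
            finally:
                lock.release()
                # The log quotes "released", matching the lines above.
                print('Lock "%s" "released" by "%s" :: held %.3fs'
                      % (name, target, time.monotonic() - t1))
        return inner
    return decorator


@synchronized('compute_resources')
def instance_claim():
    time.sleep(0.1)  # stand-in for resource-tracker claim bookkeeping


if __name__ == '__main__':
    instance_claim()

Long waits like the 24.874s above simply mean another request (here, a chain of instance_claim/abort_instance_claim calls) held the same named lock for that long before this one could enter.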
[ 577.181442] env[61852]: DEBUG nova.compute.claims [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 577.181578] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 577.351719] env[61852]: DEBUG nova.compute.utils [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 577.353199] env[61852]: DEBUG nova.compute.manager [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Not allocating networking since 'none' was specified. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 577.461607] env[61852]: INFO nova.scheduler.client.report [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Deleted allocations for instance eea17bb5-01e3-4144-a579-2a56be8154c4 [ 577.641940] env[61852]: DEBUG oslo_concurrency.lockutils [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Acquiring lock "beffa800-ff93-4230-be14-f2b906666cc0" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 577.642214] env[61852]: DEBUG oslo_concurrency.lockutils [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Lock "beffa800-ff93-4230-be14-f2b906666cc0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 577.746278] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d594e16a-f298-40a5-8185-609f330a6606 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.756931] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98540e8a-7c9c-4f5c-9f17-814dd8438a35 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.792019] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80332a3d-6dbe-4f5c-b4a8-9befb1c10a30 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.799032] env[61852]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf925f9-a2a5-45d6-baa6-e26a1a5fcc1a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.811847] env[61852]: DEBUG nova.compute.provider_tree [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 577.855923] env[61852]: DEBUG nova.compute.manager [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 577.971938] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d5de01c-aa4b-45c9-95f6-6034dc89c1ac tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "eea17bb5-01e3-4144-a579-2a56be8154c4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 47.208s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 578.315293] env[61852]: DEBUG nova.scheduler.client.report [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 578.476011] env[61852]: DEBUG nova.compute.manager [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 578.819918] env[61852]: DEBUG oslo_concurrency.lockutils [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.973s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 578.820545] env[61852]: ERROR nova.compute.manager [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port bc67f150-5d95-4527-b429-529707a6d170, please check neutron logs for more information. 
[ 578.820545] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Traceback (most recent call last): [ 578.820545] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 578.820545] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] self.driver.spawn(context, instance, image_meta, [ 578.820545] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 578.820545] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 578.820545] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 578.820545] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] vm_ref = self.build_virtual_machine(instance, [ 578.820545] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 578.820545] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] vif_infos = vmwarevif.get_vif_info(self._session, [ 578.820545] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 578.820875] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] for vif in network_info: [ 578.820875] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 578.820875] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] return self._sync_wrapper(fn, *args, **kwargs) [ 578.820875] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 578.820875] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] self.wait() [ 578.820875] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 578.820875] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] self[:] = self._gt.wait() [ 578.820875] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 578.820875] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] return self._exit_event.wait() [ 578.820875] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 578.820875] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] result = hub.switch() [ 578.820875] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
578.820875] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] return self.greenlet.switch() [ 578.821274] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 578.821274] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] result = function(*args, **kwargs) [ 578.821274] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 578.821274] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] return func(*args, **kwargs) [ 578.821274] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 578.821274] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] raise e [ 578.821274] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 578.821274] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] nwinfo = self.network_api.allocate_for_instance( [ 578.821274] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 578.821274] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] created_port_ids = self._update_ports_for_instance( [ 578.821274] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 578.821274] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] with excutils.save_and_reraise_exception(): [ 578.821274] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 578.821631] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] self.force_reraise() [ 578.821631] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 578.821631] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] raise self.value [ 578.821631] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 578.821631] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] updated_port = self._update_port( [ 578.821631] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 578.821631] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] _ensure_no_port_binding_failure(port) [ 578.821631] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 578.821631] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] raise exception.PortBindingFailed(port_id=port['id']) [ 578.821631] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] nova.exception.PortBindingFailed: Binding failed for port bc67f150-5d95-4527-b429-529707a6d170, please check neutron logs for more information. [ 578.821631] env[61852]: ERROR nova.compute.manager [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] [ 578.821930] env[61852]: DEBUG nova.compute.utils [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Binding failed for port bc67f150-5d95-4527-b429-529707a6d170, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 578.827684] env[61852]: DEBUG nova.compute.manager [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Build of instance c0d84943-8398-401d-ac7b-f4436bb8325f was re-scheduled: Binding failed for port bc67f150-5d95-4527-b429-529707a6d170, please check neutron logs for more information. {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 578.828152] env[61852]: DEBUG nova.compute.manager [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 578.828442] env[61852]: DEBUG oslo_concurrency.lockutils [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Acquiring lock "refresh_cache-c0d84943-8398-401d-ac7b-f4436bb8325f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 578.828595] env[61852]: DEBUG oslo_concurrency.lockutils [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Acquired lock "refresh_cache-c0d84943-8398-401d-ac7b-f4436bb8325f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.828757] env[61852]: DEBUG nova.network.neutron [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 578.829776] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.765s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.831839] env[61852]: INFO nova.compute.claims [None 
req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 578.863451] env[61852]: DEBUG nova.compute.manager [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 578.886698] env[61852]: DEBUG nova.virt.hardware [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=<?>,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-15T17:18:24Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 578.886951] env[61852]: DEBUG nova.virt.hardware [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 578.887941] env[61852]: DEBUG nova.virt.hardware [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 578.887941] env[61852]: DEBUG nova.virt.hardware [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 578.887941] env[61852]: DEBUG nova.virt.hardware [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 578.887941] env[61852]: DEBUG nova.virt.hardware [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 578.887941] env[61852]: DEBUG nova.virt.hardware [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 
tempest-ServerDiagnosticsV248Test-1422852949-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 578.888208] env[61852]: DEBUG nova.virt.hardware [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 578.888208] env[61852]: DEBUG nova.virt.hardware [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 578.888322] env[61852]: DEBUG nova.virt.hardware [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 578.888489] env[61852]: DEBUG nova.virt.hardware [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 578.889573] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-861b7376-2dae-49e6-96ea-73c00f3be61e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.898181] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6c09f79-cc18-4342-962d-f4ad13266e78 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.911016] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Instance VIF info [] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 578.916460] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Creating folder: Project (d7f68538337a4f4499339eed797ee279). Parent ref: group-v277280. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 578.917032] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-65b796a3-679c-407d-b322-b0bd33647145 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.927611] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Created folder: Project (d7f68538337a4f4499339eed797ee279) in parent group-v277280. 
[ 578.927611] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Creating folder: Instances. Parent ref: group-v277286. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 578.927611] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b8b653cc-ec82-429f-8683-b689f38deee4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.937410] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Created folder: Instances in parent group-v277286. [ 578.937511] env[61852]: DEBUG oslo.service.loopingcall [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 578.937686] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 578.937887] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0c172dc3-312e-404f-9e3a-2e404560930a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.956673] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 578.956673] env[61852]: value = "task-1292685" [ 578.956673] env[61852]: _type = "Task" [ 578.956673] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.964258] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292685, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.996483] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.351607] env[61852]: DEBUG nova.network.neutron [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 579.425367] env[61852]: DEBUG nova.network.neutron [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 579.467205] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292685, 'name': CreateVM_Task, 'duration_secs': 0.279426} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.467853] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 579.468122] env[61852]: DEBUG oslo_concurrency.lockutils [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 579.468281] env[61852]: DEBUG oslo_concurrency.lockutils [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 579.468586] env[61852]: DEBUG oslo_concurrency.lockutils [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 579.468885] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c128cc1-252b-4210-81ac-dac7a9c3b549 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.477398] env[61852]: DEBUG oslo_vmware.api [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Waiting for the task: (returnval){ [ 579.477398] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d7098b-ba47-e3db-f1d8-660592c471e6" [ 579.477398] env[61852]: _type = "Task" [ 579.477398] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.483595] env[61852]: DEBUG oslo_vmware.api [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d7098b-ba47-e3db-f1d8-660592c471e6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.548316] env[61852]: DEBUG oslo_concurrency.lockutils [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "290aca37-d0d7-4c8c-b8cf-8b787bbf95c9" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.548651] env[61852]: DEBUG oslo_concurrency.lockutils [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "290aca37-d0d7-4c8c-b8cf-8b787bbf95c9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.929279] env[61852]: DEBUG oslo_concurrency.lockutils [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Releasing lock "refresh_cache-c0d84943-8398-401d-ac7b-f4436bb8325f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 579.929551] env[61852]: DEBUG nova.compute.manager [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 579.929748] env[61852]: DEBUG nova.compute.manager [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 579.929846] env[61852]: DEBUG nova.network.neutron [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 579.945472] env[61852]: DEBUG nova.network.neutron [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 579.984961] env[61852]: DEBUG oslo_vmware.api [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d7098b-ba47-e3db-f1d8-660592c471e6, 'name': SearchDatastore_Task, 'duration_secs': 0.009816} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.987415] env[61852]: DEBUG oslo_concurrency.lockutils [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 579.987674] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 579.987888] env[61852]: DEBUG oslo_concurrency.lockutils [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 579.988457] env[61852]: DEBUG oslo_concurrency.lockutils [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 579.988457] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 579.988660] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eceff106-28a6-4d7a-ae25-e168ea123a90 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.996524] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 579.996704] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 579.999803] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37114f05-0512-4768-869f-dfd57aa080c1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.005149] env[61852]: DEBUG oslo_vmware.api [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Waiting for the task: (returnval){ [ 580.005149] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529b2aeb-6a77-e0c6-4b1a-ae9b327a8a7d" [ 580.005149] env[61852]: _type = "Task" [ 580.005149] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.013099] env[61852]: DEBUG oslo_vmware.api [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529b2aeb-6a77-e0c6-4b1a-ae9b327a8a7d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.213027] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42081ee6-f86e-46c0-be71-67111a7d23bf {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.220051] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6275ccb6-976d-4c10-bbf2-bbbd042846c0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.249727] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-581ad912-8f2e-49f8-9795-8cb785947495 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.257009] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe660ac-016c-42e7-9abc-7de7c596bb4e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.269121] env[61852]: DEBUG nova.compute.provider_tree [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 580.452559] env[61852]: DEBUG nova.network.neutron [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 580.515425] env[61852]: DEBUG oslo_vmware.api [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529b2aeb-6a77-e0c6-4b1a-ae9b327a8a7d, 'name': 
SearchDatastore_Task, 'duration_secs': 0.00816} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.516200] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d2eff0c-b66b-4569-a901-57c322607fb3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.520934] env[61852]: DEBUG oslo_vmware.api [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Waiting for the task: (returnval){ [ 580.520934] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5203f7f4-15b0-6b13-1284-814c44473ce7" [ 580.520934] env[61852]: _type = "Task" [ 580.520934] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.528245] env[61852]: DEBUG oslo_vmware.api [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5203f7f4-15b0-6b13-1284-814c44473ce7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.772075] env[61852]: DEBUG nova.scheduler.client.report [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 580.955370] env[61852]: INFO nova.compute.manager [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: c0d84943-8398-401d-ac7b-f4436bb8325f] Took 1.03 seconds to deallocate network for instance. [ 581.033042] env[61852]: DEBUG oslo_vmware.api [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5203f7f4-15b0-6b13-1284-814c44473ce7, 'name': SearchDatastore_Task, 'duration_secs': 0.008674} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.033042] env[61852]: DEBUG oslo_concurrency.lockutils [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 581.033042] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 4ce41dca-63c6-447d-9c0a-00f9966e0093/4ce41dca-63c6-447d-9c0a-00f9966e0093.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 581.033042] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bb1174a6-ad23-40ac-8e6c-bc8713a58233 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.038670] env[61852]: DEBUG oslo_vmware.api [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Waiting for the task: (returnval){ [ 581.038670] env[61852]: value = "task-1292686" [ 581.038670] env[61852]: _type = "Task" [ 581.038670] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.046310] env[61852]: DEBUG oslo_vmware.api [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Task: {'id': task-1292686, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.277720] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.448s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 581.278456] env[61852]: DEBUG nova.compute.manager [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 581.281616] env[61852]: DEBUG oslo_concurrency.lockutils [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.374s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 581.283418] env[61852]: INFO nova.compute.claims [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 581.549138] env[61852]: DEBUG oslo_vmware.api [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Task: {'id': task-1292686, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505595} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.549414] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 4ce41dca-63c6-447d-9c0a-00f9966e0093/4ce41dca-63c6-447d-9c0a-00f9966e0093.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 581.549700] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 581.549871] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b8f21a1b-aa1b-433f-9ae7-ec3ff7a9cf2e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.556754] env[61852]: DEBUG oslo_vmware.api [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Waiting for the task: (returnval){ [ 581.556754] env[61852]: value = "task-1292687" [ 581.556754] env[61852]: _type = "Task" [ 581.556754] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.564558] env[61852]: DEBUG oslo_vmware.api [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Task: {'id': task-1292687, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.790886] env[61852]: DEBUG nova.compute.utils [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 581.792325] env[61852]: DEBUG nova.compute.manager [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 581.792489] env[61852]: DEBUG nova.network.neutron [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 581.841463] env[61852]: DEBUG nova.policy [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '848ff8248b8649daa86ac1b0d1619f8a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c7f822f0b183423c805a2cd50cfd7671', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 581.992614] env[61852]: INFO nova.scheduler.client.report [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Deleted allocations for instance c0d84943-8398-401d-ac7b-f4436bb8325f [ 582.069039] env[61852]: DEBUG oslo_vmware.api [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Task: {'id': task-1292687, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.128489} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.069039] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 582.069688] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db53d1f4-4941-4daf-b80f-6b51c1983d93 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.089554] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] 4ce41dca-63c6-447d-9c0a-00f9966e0093/4ce41dca-63c6-447d-9c0a-00f9966e0093.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 582.089842] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b924ad5-0c74-4b94-b2bb-38b937f6e562 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.109765] env[61852]: DEBUG oslo_vmware.api [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Waiting for the task: (returnval){ [ 582.109765] env[61852]: value = "task-1292688" [ 582.109765] env[61852]: _type = "Task" [ 582.109765] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.117223] env[61852]: DEBUG oslo_vmware.api [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Task: {'id': task-1292688, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.118531] env[61852]: DEBUG nova.network.neutron [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Successfully created port: df6c589a-e0da-470c-8c06-ce35356367e9 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 582.296787] env[61852]: DEBUG nova.compute.manager [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 582.502553] env[61852]: DEBUG oslo_concurrency.lockutils [None req-869a7223-b7fc-4f57-a596-d074efcfdce8 tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Lock "c0d84943-8398-401d-ac7b-f4436bb8325f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 51.704s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 582.619223] env[61852]: DEBUG oslo_vmware.api [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Task: {'id': task-1292688, 'name': ReconfigVM_Task, 'duration_secs': 0.299629} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.619532] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Reconfigured VM instance instance-0000000e to attach disk [datastore1] 4ce41dca-63c6-447d-9c0a-00f9966e0093/4ce41dca-63c6-447d-9c0a-00f9966e0093.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 582.620137] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bcfca656-7710-4720-8ee6-1f1c0eef4707 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.627660] env[61852]: DEBUG oslo_vmware.api [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Waiting for the task: (returnval){ [ 582.627660] env[61852]: value = "task-1292689" [ 582.627660] env[61852]: _type = "Task" [ 582.627660] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.635260] env[61852]: DEBUG oslo_vmware.api [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Task: {'id': task-1292689, 'name': Rename_Task} progress is 5%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.662020] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2d9f83a-f9e2-4308-b119-93fa500bbc12 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.670024] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71fe76d9-99cf-48ed-8f27-ba429c85114d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.700469] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf306c38-eed4-41da-bbb8-26f900549a88 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.708437] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-311e147a-dfe2-45aa-8f07-057faed24edd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.726838] env[61852]: DEBUG nova.compute.provider_tree [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 582.834875] env[61852]: DEBUG nova.compute.manager [req-57c3df1d-fcd6-4fb1-bb8d-a952e717ef29 req-ad13b5f1-bf35-4b96-a82f-4c632951aceb service nova] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Received event network-changed-df6c589a-e0da-470c-8c06-ce35356367e9 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 582.835043] env[61852]: DEBUG nova.compute.manager [req-57c3df1d-fcd6-4fb1-bb8d-a952e717ef29 req-ad13b5f1-bf35-4b96-a82f-4c632951aceb service nova] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Refreshing instance network info cache due to event network-changed-df6c589a-e0da-470c-8c06-ce35356367e9. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 582.836497] env[61852]: DEBUG oslo_concurrency.lockutils [req-57c3df1d-fcd6-4fb1-bb8d-a952e717ef29 req-ad13b5f1-bf35-4b96-a82f-4c632951aceb service nova] Acquiring lock "refresh_cache-e9a7c08d-e021-43d0-b757-6ad0174b4648" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 582.836497] env[61852]: DEBUG oslo_concurrency.lockutils [req-57c3df1d-fcd6-4fb1-bb8d-a952e717ef29 req-ad13b5f1-bf35-4b96-a82f-4c632951aceb service nova] Acquired lock "refresh_cache-e9a7c08d-e021-43d0-b757-6ad0174b4648" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 582.836497] env[61852]: DEBUG nova.network.neutron [req-57c3df1d-fcd6-4fb1-bb8d-a952e717ef29 req-ad13b5f1-bf35-4b96-a82f-4c632951aceb service nova] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Refreshing network info cache for port df6c589a-e0da-470c-8c06-ce35356367e9 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 583.003711] env[61852]: DEBUG nova.compute.manager [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 583.016193] env[61852]: ERROR nova.compute.manager [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port df6c589a-e0da-470c-8c06-ce35356367e9, please check neutron logs for more information. 
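
The lockutils lines above show how the external-event handler serializes work on the instance's network info cache: it takes a lock named "refresh_cache-<instance uuid>" before refreshing and releases it afterwards. A minimal sketch of that pattern, assuming a hypothetical refresh_fn callback; only the lock name and the acquire/release behaviour are taken from the log:

    # oslo.concurrency's lock() context manager emits the Acquiring/Acquired/
    # Releasing DEBUG lines seen in this log (lockutils.py:310/313/331).
    from oslo_concurrency import lockutils

    def refresh_network_cache(instance_uuid, refresh_fn):
        # refresh_fn is a hypothetical stand-in for the Neutron cache refresh.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            refresh_fn()
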
[ 583.016193] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 583.016193] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 583.016193] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 583.016193] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 583.016193] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 583.016193] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 583.016193] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 583.016193] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 583.016193] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 583.016193] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 583.016193] env[61852]: ERROR nova.compute.manager raise self.value [ 583.016193] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 583.016193] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 583.016193] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 583.016193] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 583.016681] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 583.016681] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 583.016681] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port df6c589a-e0da-470c-8c06-ce35356367e9, please check neutron logs for more information. 
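
The traceback above bottoms out in nova/network/neutron.py's _ensure_no_port_binding_failure, which turns a failed Neutron binding into PortBindingFailed. A minimal reconstruction from the frames shown; the function name and the raise come from the traceback, while the exact condition (Neutron reporting a binding:vif_type of "binding_failed") and the local exception class are assumptions standing in for nova.exception.PortBindingFailed:

    class PortBindingFailed(Exception):
        # Stand-in for nova.exception.PortBindingFailed, mirroring the
        # message in the log above.
        def __init__(self, port_id):
            super().__init__('Binding failed for port %s, please check '
                             'neutron logs for more information.' % port_id)

    def _ensure_no_port_binding_failure(port):
        # Assumption: Neutron marks a port whose binding could not be
        # completed by setting binding:vif_type to "binding_failed".
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])
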
[ 583.016681] env[61852]: ERROR nova.compute.manager [ 583.016681] env[61852]: Traceback (most recent call last): [ 583.016681] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 583.016681] env[61852]: listener.cb(fileno) [ 583.016681] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 583.016681] env[61852]: result = function(*args, **kwargs) [ 583.016681] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 583.016681] env[61852]: return func(*args, **kwargs) [ 583.016681] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 583.016681] env[61852]: raise e [ 583.016681] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 583.016681] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 583.016681] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 583.016681] env[61852]: created_port_ids = self._update_ports_for_instance( [ 583.016681] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 583.016681] env[61852]: with excutils.save_and_reraise_exception(): [ 583.016681] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 583.016681] env[61852]: self.force_reraise() [ 583.016681] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 583.016681] env[61852]: raise self.value [ 583.016681] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 583.016681] env[61852]: updated_port = self._update_port( [ 583.016681] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 583.016681] env[61852]: _ensure_no_port_binding_failure(port) [ 583.016681] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 583.016681] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 583.017702] env[61852]: nova.exception.PortBindingFailed: Binding failed for port df6c589a-e0da-470c-8c06-ce35356367e9, please check neutron logs for more information. [ 583.017702] env[61852]: Removing descriptor: 19 [ 583.138028] env[61852]: DEBUG oslo_vmware.api [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Task: {'id': task-1292689, 'name': Rename_Task, 'duration_secs': 0.122969} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.138028] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 583.138028] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a5b73570-92ec-4101-9272-d466f2d77efd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.143800] env[61852]: DEBUG oslo_vmware.api [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Waiting for the task: (returnval){ [ 583.143800] env[61852]: value = "task-1292690" [ 583.143800] env[61852]: _type = "Task" [ 583.143800] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.150852] env[61852]: DEBUG oslo_vmware.api [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Task: {'id': task-1292690, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.229663] env[61852]: DEBUG nova.scheduler.client.report [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 583.310450] env[61852]: DEBUG nova.compute.manager [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 583.336670] env[61852]: DEBUG nova.virt.hardware [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:20:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1123826384',id=21,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-895780841',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 583.336904] env[61852]: DEBUG nova.virt.hardware [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 583.337073] env[61852]: DEBUG nova.virt.hardware [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 583.337255] env[61852]: DEBUG nova.virt.hardware [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 583.337458] env[61852]: DEBUG nova.virt.hardware [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 583.337624] env[61852]: DEBUG nova.virt.hardware [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 583.337832] env[61852]: DEBUG nova.virt.hardware [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 583.337992] env[61852]: DEBUG nova.virt.hardware [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 
tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 583.338171] env[61852]: DEBUG nova.virt.hardware [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 583.338335] env[61852]: DEBUG nova.virt.hardware [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 583.338508] env[61852]: DEBUG nova.virt.hardware [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 583.339489] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d2e50d8-28a8-4116-9dbb-0660365ef228 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.349070] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf5332fc-b900-41e9-b13c-5de6be5c8799 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.363742] env[61852]: ERROR nova.compute.manager [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port df6c589a-e0da-470c-8c06-ce35356367e9, please check neutron logs for more information. 
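
The nova.virt.hardware lines above enumerate candidate CPU topologies: with the default limits of 65536 sockets/cores/threads and a single vCPU, the only (sockets, cores, threads) factorization is 1:1:1, hence "Got 1 possible topologies". A toy illustration of that enumeration, a simplification rather than nova.virt.hardware's actual code:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Enumerate (sockets, cores, threads) splits whose product is vcpus.
        return [(s, c, t)
                for s in range(1, min(vcpus, max_sockets) + 1)
                for c in range(1, min(vcpus, max_cores) + 1)
                for t in range(1, min(vcpus, max_threads) + 1)
                if s * c * t == vcpus]

    # Matches the log: one vCPU yields the single topology 1:1:1.
    assert possible_topologies(1) == [(1, 1, 1)]
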
[ 583.363742] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Traceback (most recent call last): [ 583.363742] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 583.363742] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] yield resources [ 583.363742] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 583.363742] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] self.driver.spawn(context, instance, image_meta, [ 583.363742] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 583.363742] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] self._vmops.spawn(context, instance, image_meta, injected_files, [ 583.363742] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 583.363742] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] vm_ref = self.build_virtual_machine(instance, [ 583.363742] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 583.364173] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] vif_infos = vmwarevif.get_vif_info(self._session, [ 583.364173] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 583.364173] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] for vif in network_info: [ 583.364173] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 583.364173] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] return self._sync_wrapper(fn, *args, **kwargs) [ 583.364173] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 583.364173] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] self.wait() [ 583.364173] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 583.364173] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] self[:] = self._gt.wait() [ 583.364173] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 583.364173] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] return self._exit_event.wait() [ 583.364173] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 583.364173] env[61852]: ERROR 
nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] current.throw(*self._exc) [ 583.364589] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 583.364589] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] result = function(*args, **kwargs) [ 583.364589] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 583.364589] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] return func(*args, **kwargs) [ 583.364589] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 583.364589] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] raise e [ 583.364589] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 583.364589] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] nwinfo = self.network_api.allocate_for_instance( [ 583.364589] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 583.364589] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] created_port_ids = self._update_ports_for_instance( [ 583.364589] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 583.364589] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] with excutils.save_and_reraise_exception(): [ 583.364589] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 583.365073] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] self.force_reraise() [ 583.365073] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 583.365073] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] raise self.value [ 583.365073] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 583.365073] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] updated_port = self._update_port( [ 583.365073] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 583.365073] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] _ensure_no_port_binding_failure(port) [ 583.365073] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
583.365073] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] raise exception.PortBindingFailed(port_id=port['id']) [ 583.365073] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] nova.exception.PortBindingFailed: Binding failed for port df6c589a-e0da-470c-8c06-ce35356367e9, please check neutron logs for more information. [ 583.365073] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] [ 583.365073] env[61852]: INFO nova.compute.manager [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Terminating instance [ 583.366327] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Acquiring lock "refresh_cache-e9a7c08d-e021-43d0-b757-6ad0174b4648" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 583.367108] env[61852]: DEBUG nova.network.neutron [req-57c3df1d-fcd6-4fb1-bb8d-a952e717ef29 req-ad13b5f1-bf35-4b96-a82f-4c632951aceb service nova] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 583.443309] env[61852]: DEBUG nova.network.neutron [req-57c3df1d-fcd6-4fb1-bb8d-a952e717ef29 req-ad13b5f1-bf35-4b96-a82f-4c632951aceb service nova] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 583.522490] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.653829] env[61852]: DEBUG oslo_vmware.api [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Task: {'id': task-1292690, 'name': PowerOnVM_Task, 'duration_secs': 0.424277} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.654122] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 583.654329] env[61852]: INFO nova.compute.manager [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Took 4.79 seconds to spawn the instance on the hypervisor. 
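
The ReconfigVM_Task/Rename_Task/PowerOnVM_Task sequence above is oslo.vmware's invoke-then-poll pattern: invoke_api returns a task moref (e.g. "task-1292690") and wait_for_task polls it, logging "progress is N%" until it reaches a terminal state. A sketch under assumed credentials and an already-resolved vm_ref:

    from oslo_vmware import api

    def make_session():
        # Host and credentials are placeholders; the session connects on
        # construction. task_poll_interval drives the polling cadence.
        return api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                    api_retry_count=10,
                                    task_poll_interval=0.5)

    def power_on(session, vm_ref):
        # invoke_api returns the task reference; wait_for_task polls it and
        # raises if the task errors (oslo_vmware/api.py:397/434/444 above).
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task)
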
[ 583.654687] env[61852]: DEBUG nova.compute.manager [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 583.655304] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f6d1809-eb77-41b3-b698-e924eff371bd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.734252] env[61852]: DEBUG oslo_concurrency.lockutils [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.453s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 583.734861] env[61852]: DEBUG nova.compute.manager [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 583.739834] env[61852]: DEBUG oslo_concurrency.lockutils [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 27.631s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 583.739834] env[61852]: DEBUG nova.objects.instance [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61852) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 583.950664] env[61852]: DEBUG oslo_concurrency.lockutils [req-57c3df1d-fcd6-4fb1-bb8d-a952e717ef29 req-ad13b5f1-bf35-4b96-a82f-4c632951aceb service nova] Releasing lock "refresh_cache-e9a7c08d-e021-43d0-b757-6ad0174b4648" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 583.950664] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Acquired lock "refresh_cache-e9a7c08d-e021-43d0-b757-6ad0174b4648" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.950664] env[61852]: DEBUG nova.network.neutron [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 584.003020] env[61852]: DEBUG nova.compute.manager [None req-2b4aa4ff-6968-4300-9948-b5d8527ba21e tempest-ServerDiagnosticsV248Test-2011693375 
tempest-ServerDiagnosticsV248Test-2011693375-project-admin] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 584.003020] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-853e5f0c-be03-41d3-b358-14b932af3427 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.009081] env[61852]: INFO nova.compute.manager [None req-2b4aa4ff-6968-4300-9948-b5d8527ba21e tempest-ServerDiagnosticsV248Test-2011693375 tempest-ServerDiagnosticsV248Test-2011693375-project-admin] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Retrieving diagnostics [ 584.010288] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9846177-ab92-4c0a-b964-312452b245c3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.172294] env[61852]: INFO nova.compute.manager [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Took 32.82 seconds to build instance. [ 584.240022] env[61852]: DEBUG nova.compute.utils [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 584.241383] env[61852]: DEBUG nova.compute.manager [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 584.241530] env[61852]: DEBUG nova.network.neutron [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 584.287803] env[61852]: DEBUG nova.policy [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c1e0b43e97df47d496aa9593d4168c12', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ae52e05dc15f41f5a6ef7b027810563b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 584.467818] env[61852]: DEBUG nova.network.neutron [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 584.528272] env[61852]: DEBUG nova.network.neutron [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Successfully created port: 7df0f793-7d24-48e0-ba34-a70be0d427af {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 584.675529] env[61852]: DEBUG oslo_concurrency.lockutils [None req-46668b8e-2047-4e49-8da2-859ffc858ae3 tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Lock "4ce41dca-63c6-447d-9c0a-00f9966e0093" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 50.282s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 584.733240] env[61852]: DEBUG nova.network.neutron [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 584.749018] env[61852]: DEBUG nova.compute.manager [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 584.749915] env[61852]: DEBUG oslo_concurrency.lockutils [None req-41f54486-a63d-4174-b50e-880c105631d6 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 584.751087] env[61852]: DEBUG oslo_concurrency.lockutils [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 19.248s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.751426] env[61852]: DEBUG nova.objects.instance [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61852) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 585.029760] env[61852]: DEBUG nova.compute.manager [req-9ebc7966-1250-4edc-be32-af454ec5807d req-cc30e283-6852-4abe-ac30-8b57923c9fb0 service nova] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Received event network-vif-deleted-df6c589a-e0da-470c-8c06-ce35356367e9 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 585.178543] env[61852]: DEBUG nova.compute.manager [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979
tempest-MigrationsAdminTest-1949443979-project-member] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 585.238021] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Releasing lock "refresh_cache-e9a7c08d-e021-43d0-b757-6ad0174b4648" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 585.238021] env[61852]: DEBUG nova.compute.manager [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 585.238021] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 585.238242] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b99d13d5-0443-48d6-b290-0af80e4dcc30 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.250915] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-487ee30a-1681-44d6-a75e-8c965231ca4c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.283751] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e9a7c08d-e021-43d0-b757-6ad0174b4648 could not be found. [ 585.283751] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 585.283916] env[61852]: INFO nova.compute.manager [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Took 0.05 seconds to destroy the instance on the hypervisor. [ 585.284112] env[61852]: DEBUG oslo.service.loopingcall [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 585.284907] env[61852]: DEBUG nova.compute.manager [-] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 585.284907] env[61852]: DEBUG nova.network.neutron [-] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 585.304261] env[61852]: DEBUG nova.network.neutron [-] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 585.357579] env[61852]: ERROR nova.compute.manager [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7df0f793-7d24-48e0-ba34-a70be0d427af, please check neutron logs for more information. [ 585.357579] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 585.357579] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 585.357579] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 585.357579] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 585.357579] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 585.357579] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 585.357579] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 585.357579] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 585.357579] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 585.357579] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 585.357579] env[61852]: ERROR nova.compute.manager raise self.value [ 585.357579] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 585.357579] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 585.357579] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 585.357579] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 585.358259] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 585.358259] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 585.358259] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7df0f793-7d24-48e0-ba34-a70be0d427af, please check neutron logs for more information. 
[ 585.358259] env[61852]: ERROR nova.compute.manager [ 585.358259] env[61852]: Traceback (most recent call last): [ 585.358259] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 585.358259] env[61852]: listener.cb(fileno) [ 585.358259] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 585.358259] env[61852]: result = function(*args, **kwargs) [ 585.358259] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 585.358259] env[61852]: return func(*args, **kwargs) [ 585.358259] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 585.358259] env[61852]: raise e [ 585.358259] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 585.358259] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 585.358259] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 585.358259] env[61852]: created_port_ids = self._update_ports_for_instance( [ 585.358259] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 585.358259] env[61852]: with excutils.save_and_reraise_exception(): [ 585.358259] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 585.358259] env[61852]: self.force_reraise() [ 585.358259] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 585.358259] env[61852]: raise self.value [ 585.358259] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 585.358259] env[61852]: updated_port = self._update_port( [ 585.358259] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 585.358259] env[61852]: _ensure_no_port_binding_failure(port) [ 585.358259] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 585.358259] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 585.359212] env[61852]: nova.exception.PortBindingFailed: Binding failed for port 7df0f793-7d24-48e0-ba34-a70be0d427af, please check neutron logs for more information. 
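
Both tracebacks route through oslo_utils.excutils.save_and_reraise_exception(), which lets Nova run cleanup while still propagating the original PortBindingFailed (the __exit__/force_reraise frames above). A self-contained sketch of the pattern; the failing update and the cleanup body are stand-ins, not Nova's code:

    from oslo_utils import excutils

    def _update_port(port_id):
        raise RuntimeError('binding failed for %s' % port_id)  # stand-in

    def update_ports(port_ids):
        created = []
        try:
            for port_id in port_ids:
                created.append(_update_port(port_id))
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup runs here; on leaving the context manager,
                # force_reraise() re-raises the saved exception, matching
                # the excutils.py:200/227 frames in the tracebacks.
                created.clear()
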
[ 585.359212] env[61852]: Removing descriptor: 19 [ 585.700388] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.774138] env[61852]: DEBUG oslo_concurrency.lockutils [None req-590ba99c-c093-437e-8fcc-45ea3edde710 tempest-ServersAdmin275Test-256889786 tempest-ServersAdmin275Test-256889786-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.023s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 585.775836] env[61852]: DEBUG nova.compute.manager [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 585.778184] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.863s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.780653] env[61852]: INFO nova.compute.claims [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 585.801524] env[61852]: DEBUG nova.virt.hardware [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 585.801756] env[61852]: DEBUG nova.virt.hardware [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 585.801909] env[61852]: DEBUG nova.virt.hardware [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 
tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 585.802156] env[61852]: DEBUG nova.virt.hardware [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 585.802330] env[61852]: DEBUG nova.virt.hardware [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 585.802503] env[61852]: DEBUG nova.virt.hardware [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 585.802761] env[61852]: DEBUG nova.virt.hardware [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 585.802937] env[61852]: DEBUG nova.virt.hardware [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 585.803131] env[61852]: DEBUG nova.virt.hardware [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 585.803318] env[61852]: DEBUG nova.virt.hardware [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 585.803536] env[61852]: DEBUG nova.virt.hardware [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 585.804668] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e759d1cc-b3fd-41c5-8dbd-ff71b59573b9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.807192] env[61852]: DEBUG nova.network.neutron [-] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
585.814141] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92aeb46b-222e-4b8f-b06b-44ee6aef4f0d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.829833] env[61852]: ERROR nova.compute.manager [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7df0f793-7d24-48e0-ba34-a70be0d427af, please check neutron logs for more information. [ 585.829833] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Traceback (most recent call last): [ 585.829833] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 585.829833] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] yield resources [ 585.829833] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 585.829833] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] self.driver.spawn(context, instance, image_meta, [ 585.829833] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 585.829833] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] self._vmops.spawn(context, instance, image_meta, injected_files, [ 585.829833] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 585.829833] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] vm_ref = self.build_virtual_machine(instance, [ 585.829833] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 585.830296] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] vif_infos = vmwarevif.get_vif_info(self._session, [ 585.830296] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 585.830296] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] for vif in network_info: [ 585.830296] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 585.830296] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] return self._sync_wrapper(fn, *args, **kwargs) [ 585.830296] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 585.830296] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] self.wait() [ 585.830296] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/network/model.py", line 635, in 
wait [ 585.830296] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] self[:] = self._gt.wait() [ 585.830296] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 585.830296] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] return self._exit_event.wait() [ 585.830296] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 585.830296] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] current.throw(*self._exc) [ 585.830665] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 585.830665] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] result = function(*args, **kwargs) [ 585.830665] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 585.830665] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] return func(*args, **kwargs) [ 585.830665] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 585.830665] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] raise e [ 585.830665] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 585.830665] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] nwinfo = self.network_api.allocate_for_instance( [ 585.830665] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 585.830665] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] created_port_ids = self._update_ports_for_instance( [ 585.830665] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 585.830665] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] with excutils.save_and_reraise_exception(): [ 585.830665] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 585.831056] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] self.force_reraise() [ 585.831056] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 585.831056] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] raise self.value [ 585.831056] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/network/neutron.py", 
line 1389, in _update_ports_for_instance [ 585.831056] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] updated_port = self._update_port( [ 585.831056] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 585.831056] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] _ensure_no_port_binding_failure(port) [ 585.831056] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 585.831056] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] raise exception.PortBindingFailed(port_id=port['id']) [ 585.831056] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] nova.exception.PortBindingFailed: Binding failed for port 7df0f793-7d24-48e0-ba34-a70be0d427af, please check neutron logs for more information. [ 585.831056] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] [ 585.831056] env[61852]: INFO nova.compute.manager [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Terminating instance [ 585.831761] env[61852]: DEBUG oslo_concurrency.lockutils [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Acquiring lock "refresh_cache-b5f994d9-e0aa-4335-8339-df76a1a032ed" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 585.831761] env[61852]: DEBUG oslo_concurrency.lockutils [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Acquired lock "refresh_cache-b5f994d9-e0aa-4335-8339-df76a1a032ed" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.831761] env[61852]: DEBUG nova.network.neutron [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 586.310534] env[61852]: INFO nova.compute.manager [-] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Took 1.03 seconds to deallocate network for instance. 
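Both PortBindingFailed tracebacks in this run bottom out in nova/network/neutron.py's _ensure_no_port_binding_failure, which turns a Neutron port whose binding failed into a Nova exception. A minimal sketch of that check, assuming the port dict shape returned by Neutron's API (the 'binding:vif_type' key and the 'binding_failed' sentinel follow Neutron's API convention; they are not quoted from this log):

    # Hedged reconstruction of the check at nova/network/neutron.py:294,
    # not a verbatim copy of the Nova source.
    from nova import exception

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # Neutron's failure sentinel

    def _ensure_no_port_binding_failure(port):
        binding_vif_type = port.get('binding:vif_type')
        if binding_vif_type == VIF_TYPE_BINDING_FAILED:
            # Produces the message seen above: "Binding failed for port
            # <port_id>, please check neutron logs for more information."
            raise exception.PortBindingFailed(port_id=port['id'])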
[ 586.312977] env[61852]: DEBUG nova.compute.claims [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 586.313189] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 586.350673] env[61852]: DEBUG nova.network.neutron [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 586.495421] env[61852]: DEBUG nova.network.neutron [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.999041] env[61852]: DEBUG oslo_concurrency.lockutils [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Releasing lock "refresh_cache-b5f994d9-e0aa-4335-8339-df76a1a032ed" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 586.999041] env[61852]: DEBUG nova.compute.manager [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 586.999250] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 586.999685] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4fe92c27-894e-4bb3-94c2-5108cb6d64ef {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.010412] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5f03c9e-6153-4c15-9174-676fc7d2ba13 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.036698] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b5f994d9-e0aa-4335-8339-df76a1a032ed could not be found. [ 587.036859] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 587.037056] env[61852]: INFO nova.compute.manager [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Took 0.04 seconds to destroy the instance on the hypervisor. [ 587.037303] env[61852]: DEBUG oslo.service.loopingcall [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 587.039791] env[61852]: DEBUG nova.compute.manager [-] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 587.039791] env[61852]: DEBUG nova.network.neutron [-] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 587.055081] env[61852]: DEBUG nova.compute.manager [req-0cec286e-c3c4-4fc1-9fe2-2083b4212d75 req-527e698e-7ae4-4f40-b6f8-6fa2b055504f service nova] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Received event network-changed-7df0f793-7d24-48e0-ba34-a70be0d427af {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 587.055273] env[61852]: DEBUG nova.compute.manager [req-0cec286e-c3c4-4fc1-9fe2-2083b4212d75 req-527e698e-7ae4-4f40-b6f8-6fa2b055504f service nova] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Refreshing instance network info cache due to event network-changed-7df0f793-7d24-48e0-ba34-a70be0d427af. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 587.055478] env[61852]: DEBUG oslo_concurrency.lockutils [req-0cec286e-c3c4-4fc1-9fe2-2083b4212d75 req-527e698e-7ae4-4f40-b6f8-6fa2b055504f service nova] Acquiring lock "refresh_cache-b5f994d9-e0aa-4335-8339-df76a1a032ed" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 587.055655] env[61852]: DEBUG oslo_concurrency.lockutils [req-0cec286e-c3c4-4fc1-9fe2-2083b4212d75 req-527e698e-7ae4-4f40-b6f8-6fa2b055504f service nova] Acquired lock "refresh_cache-b5f994d9-e0aa-4335-8339-df76a1a032ed" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 587.055755] env[61852]: DEBUG nova.network.neutron [req-0cec286e-c3c4-4fc1-9fe2-2083b4212d75 req-527e698e-7ae4-4f40-b6f8-6fa2b055504f service nova] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Refreshing network info cache for port 7df0f793-7d24-48e0-ba34-a70be0d427af {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 587.057703] env[61852]: DEBUG nova.network.neutron [-] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 587.141094] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9cfc95e-5b56-44c7-b98f-5abbe776d353 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.148201] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-887b95c1-df91-43fc-8e23-4cce01c17f4d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.178983] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a68bdd30-f3dc-4b08-850d-bfe58e15611d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.186044] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9836f58-a009-4434-ad1f-3e596b99437d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.199646] env[61852]: DEBUG nova.compute.provider_tree [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 587.559731] env[61852]: DEBUG nova.network.neutron [-] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.573687] env[61852]: DEBUG nova.network.neutron [req-0cec286e-c3c4-4fc1-9fe2-2083b4212d75 req-527e698e-7ae4-4f40-b6f8-6fa2b055504f service nova] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 587.642555] env[61852]: DEBUG nova.network.neutron [req-0cec286e-c3c4-4fc1-9fe2-2083b4212d75 req-527e698e-7ae4-4f40-b6f8-6fa2b055504f service nova] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.702628] env[61852]: DEBUG nova.scheduler.client.report [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 588.063029] env[61852]: INFO nova.compute.manager [-] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Took 1.02 seconds to deallocate network for instance. 
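The "compute_resources" lock records in this section come from oslo.concurrency's lockutils, which DEBUG-logs how long each caller waited for and then held a named lock (the 2.429s hold and 17.609s wait below point at resource-tracker contention). A minimal usage sketch; only the decorator pattern is oslo.concurrency's real API, the function body is illustrative:

    # Minimal oslo.concurrency pattern behind the "compute_resources"
    # acquire/release records (names illustrative).
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim():
        # Runs with the named lock held; lockutils emits the
        # acquired/waited and released/held DEBUG lines seen in this log.
        pass

    abort_instance_claim()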
[ 588.064977] env[61852]: DEBUG nova.compute.claims [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 588.065209] env[61852]: DEBUG oslo_concurrency.lockutils [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 588.146586] env[61852]: DEBUG oslo_concurrency.lockutils [req-0cec286e-c3c4-4fc1-9fe2-2083b4212d75 req-527e698e-7ae4-4f40-b6f8-6fa2b055504f service nova] Releasing lock "refresh_cache-b5f994d9-e0aa-4335-8339-df76a1a032ed" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 588.146967] env[61852]: DEBUG nova.compute.manager [req-0cec286e-c3c4-4fc1-9fe2-2083b4212d75 req-527e698e-7ae4-4f40-b6f8-6fa2b055504f service nova] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Received event network-vif-deleted-7df0f793-7d24-48e0-ba34-a70be0d427af {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 588.207161] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.429s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 588.207698] env[61852]: DEBUG nova.compute.manager [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 588.210645] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.609s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 588.715480] env[61852]: DEBUG nova.compute.utils [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 588.720016] env[61852]: DEBUG nova.compute.manager [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 588.720200] env[61852]: DEBUG nova.network.neutron [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 588.760450] env[61852]: DEBUG nova.policy [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4bfc850364994a828ba892fa45c74cd5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '41ddecbc7e82431fb338e105c6ad477c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 589.076068] env[61852]: DEBUG nova.network.neutron [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Successfully created port: d2838531-972f-47ea-83ea-e4364d4030ee {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 589.136222] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11602593-0fe5-46bc-9bf4-0b10f8526541 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.145160] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa7a4a4f-007c-4535-b369-8dba038e3868 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.176870] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8993e52-fdb1-4608-8a92-9995d764febe {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.185668] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ba976b4-91bc-4ed1-93d7-9ad835d9313e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.201351] env[61852]: DEBUG nova.compute.provider_tree [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 589.220493] env[61852]: DEBUG nova.compute.manager [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 589.706968] env[61852]: DEBUG nova.scheduler.client.report [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 589.817571] env[61852]: DEBUG nova.compute.manager [req-0632fbd4-5a6b-4d23-b27b-9ef204d206aa req-85a8a247-9a58-43f9-b51b-018e17d7b22c service nova] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Received event network-changed-d2838531-972f-47ea-83ea-e4364d4030ee {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 589.817834] env[61852]: DEBUG nova.compute.manager [req-0632fbd4-5a6b-4d23-b27b-9ef204d206aa req-85a8a247-9a58-43f9-b51b-018e17d7b22c service nova] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Refreshing instance network info cache due to event network-changed-d2838531-972f-47ea-83ea-e4364d4030ee. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 589.818048] env[61852]: DEBUG oslo_concurrency.lockutils [req-0632fbd4-5a6b-4d23-b27b-9ef204d206aa req-85a8a247-9a58-43f9-b51b-018e17d7b22c service nova] Acquiring lock "refresh_cache-db41ed39-0fef-48ea-9197-8d3d8844547a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 589.818167] env[61852]: DEBUG oslo_concurrency.lockutils [req-0632fbd4-5a6b-4d23-b27b-9ef204d206aa req-85a8a247-9a58-43f9-b51b-018e17d7b22c service nova] Acquired lock "refresh_cache-db41ed39-0fef-48ea-9197-8d3d8844547a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.818297] env[61852]: DEBUG nova.network.neutron [req-0632fbd4-5a6b-4d23-b27b-9ef204d206aa req-85a8a247-9a58-43f9-b51b-018e17d7b22c service nova] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Refreshing network info cache for port d2838531-972f-47ea-83ea-e4364d4030ee {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 590.012776] env[61852]: ERROR nova.compute.manager [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d2838531-972f-47ea-83ea-e4364d4030ee, please check neutron logs for more information. 
[ 590.012776] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 590.012776] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 590.012776] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 590.012776] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 590.012776] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 590.012776] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 590.012776] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 590.012776] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 590.012776] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 590.012776] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 590.012776] env[61852]: ERROR nova.compute.manager raise self.value [ 590.012776] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 590.012776] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 590.012776] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 590.012776] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 590.013530] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 590.013530] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 590.013530] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d2838531-972f-47ea-83ea-e4364d4030ee, please check neutron logs for more information. 
[ 590.013530] env[61852]: ERROR nova.compute.manager [ 590.013530] env[61852]: Traceback (most recent call last): [ 590.013530] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 590.013530] env[61852]: listener.cb(fileno) [ 590.013530] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 590.013530] env[61852]: result = function(*args, **kwargs) [ 590.013530] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 590.013530] env[61852]: return func(*args, **kwargs) [ 590.013530] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 590.013530] env[61852]: raise e [ 590.013530] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 590.013530] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 590.013530] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 590.013530] env[61852]: created_port_ids = self._update_ports_for_instance( [ 590.013530] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 590.013530] env[61852]: with excutils.save_and_reraise_exception(): [ 590.013530] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 590.013530] env[61852]: self.force_reraise() [ 590.013530] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 590.013530] env[61852]: raise self.value [ 590.013530] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 590.013530] env[61852]: updated_port = self._update_port( [ 590.013530] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 590.013530] env[61852]: _ensure_no_port_binding_failure(port) [ 590.013530] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 590.013530] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 590.014714] env[61852]: nova.exception.PortBindingFailed: Binding failed for port d2838531-972f-47ea-83ea-e4364d4030ee, please check neutron logs for more information. 
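The bare traceback above (with no instance prefix) is the same PortBindingFailed escaping the eventlet greenthread that ran _allocate_network_async; the instance-tagged copies appear when callers wait() on that thread. A minimal sketch of that propagation pattern with plain eventlet (the function name and message are illustrative):

    # How an exception raised inside a greenthread resurfaces at wait(),
    # matching the paired tracebacks in this log (illustrative).
    import eventlet

    def _allocate():
        raise RuntimeError('Binding failed for port ...')

    gt = eventlet.spawn(_allocate)
    try:
        gt.wait()  # re-raises the greenthread's exception in the caller
    except RuntimeError as exc:
        print('caller sees:', exc)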
[ 590.014714] env[61852]: Removing descriptor: 19 [ 590.156864] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 590.157167] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 590.214968] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.003s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 590.216205] env[61852]: ERROR nova.compute.manager [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 26aa5454-3290-42f7-909e-df1f87ecd38a, please check neutron logs for more information. [ 590.216205] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Traceback (most recent call last): [ 590.216205] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 590.216205] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] self.driver.spawn(context, instance, image_meta, [ 590.216205] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 590.216205] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] self._vmops.spawn(context, instance, image_meta, injected_files, [ 590.216205] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 590.216205] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] vm_ref = self.build_virtual_machine(instance, [ 590.216205] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 590.216205] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] vif_infos = vmwarevif.get_vif_info(self._session, [ 590.216205] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 590.216705] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] for vif in network_info: [ 590.216705] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 590.216705] env[61852]: ERROR nova.compute.manager 
[instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] return self._sync_wrapper(fn, *args, **kwargs) [ 590.216705] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 590.216705] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] self.wait() [ 590.216705] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 590.216705] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] self[:] = self._gt.wait() [ 590.216705] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 590.216705] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] return self._exit_event.wait() [ 590.216705] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 590.216705] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] result = hub.switch() [ 590.216705] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 590.216705] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] return self.greenlet.switch() [ 590.217168] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 590.217168] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] result = function(*args, **kwargs) [ 590.217168] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 590.217168] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] return func(*args, **kwargs) [ 590.217168] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 590.217168] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] raise e [ 590.217168] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 590.217168] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] nwinfo = self.network_api.allocate_for_instance( [ 590.217168] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 590.217168] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] created_port_ids = self._update_ports_for_instance( [ 590.217168] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 590.217168] env[61852]: ERROR nova.compute.manager [instance: 
a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] with excutils.save_and_reraise_exception(): [ 590.217168] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 590.217601] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] self.force_reraise() [ 590.217601] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 590.217601] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] raise self.value [ 590.217601] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 590.217601] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] updated_port = self._update_port( [ 590.217601] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 590.217601] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] _ensure_no_port_binding_failure(port) [ 590.217601] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 590.217601] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] raise exception.PortBindingFailed(port_id=port['id']) [ 590.217601] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] nova.exception.PortBindingFailed: Binding failed for port 26aa5454-3290-42f7-909e-df1f87ecd38a, please check neutron logs for more information. [ 590.217601] env[61852]: ERROR nova.compute.manager [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] [ 590.217970] env[61852]: DEBUG nova.compute.utils [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Binding failed for port 26aa5454-3290-42f7-909e-df1f87ecd38a, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 590.217970] env[61852]: DEBUG nova.compute.manager [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Build of instance a4736ffe-ad02-444d-bb6e-2cf4f70d64ee was re-scheduled: Binding failed for port 26aa5454-3290-42f7-909e-df1f87ecd38a, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 590.218131] env[61852]: DEBUG nova.compute.manager [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 590.218359] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Acquiring lock "refresh_cache-a4736ffe-ad02-444d-bb6e-2cf4f70d64ee" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 590.218507] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Acquired lock "refresh_cache-a4736ffe-ad02-444d-bb6e-2cf4f70d64ee" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 590.218661] env[61852]: DEBUG nova.network.neutron [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 590.220044] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.168s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.220857] env[61852]: DEBUG nova.objects.instance [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Lazy-loading 'resources' on Instance uuid d75e131b-1933-4e1f-bcf1-62ed83779177 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 590.230466] env[61852]: DEBUG nova.compute.manager [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 590.259091] env[61852]: DEBUG nova.virt.hardware [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 590.259344] env[61852]: DEBUG nova.virt.hardware [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 590.259500] env[61852]: DEBUG nova.virt.hardware [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 590.259678] env[61852]: DEBUG nova.virt.hardware [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 590.259823] env[61852]: DEBUG nova.virt.hardware [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 590.259970] env[61852]: DEBUG nova.virt.hardware [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 590.261270] env[61852]: DEBUG nova.virt.hardware [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 590.261470] env[61852]: DEBUG nova.virt.hardware [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 590.261695] env[61852]: DEBUG nova.virt.hardware [None 
req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 590.261902] env[61852]: DEBUG nova.virt.hardware [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 590.262269] env[61852]: DEBUG nova.virt.hardware [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 590.263205] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a30c2d63-f67c-4c85-89f7-65c6bbff84a6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.271727] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e73ad776-06fc-4bd2-bc96-6412f1b3b7c2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.286936] env[61852]: ERROR nova.compute.manager [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d2838531-972f-47ea-83ea-e4364d4030ee, please check neutron logs for more information. 
[ 590.286936] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Traceback (most recent call last): [ 590.286936] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 590.286936] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] yield resources [ 590.286936] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 590.286936] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] self.driver.spawn(context, instance, image_meta, [ 590.286936] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 590.286936] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 590.286936] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 590.286936] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] vm_ref = self.build_virtual_machine(instance, [ 590.286936] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 590.287439] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] vif_infos = vmwarevif.get_vif_info(self._session, [ 590.287439] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 590.287439] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] for vif in network_info: [ 590.287439] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 590.287439] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] return self._sync_wrapper(fn, *args, **kwargs) [ 590.287439] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 590.287439] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] self.wait() [ 590.287439] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 590.287439] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] self[:] = self._gt.wait() [ 590.287439] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 590.287439] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] return self._exit_event.wait() [ 590.287439] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 590.287439] env[61852]: ERROR 
nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] current.throw(*self._exc) [ 590.287876] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 590.287876] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] result = function(*args, **kwargs) [ 590.287876] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 590.287876] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] return func(*args, **kwargs) [ 590.287876] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 590.287876] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] raise e [ 590.287876] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 590.287876] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] nwinfo = self.network_api.allocate_for_instance( [ 590.287876] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 590.287876] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] created_port_ids = self._update_ports_for_instance( [ 590.287876] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 590.287876] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] with excutils.save_and_reraise_exception(): [ 590.287876] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 590.288340] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] self.force_reraise() [ 590.288340] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 590.288340] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] raise self.value [ 590.288340] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 590.288340] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] updated_port = self._update_port( [ 590.288340] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 590.288340] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] _ensure_no_port_binding_failure(port) [ 590.288340] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
[ 590.288340] env[61852]: INFO nova.compute.manager [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Terminating instance
[ 590.289206] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Acquiring lock "refresh_cache-db41ed39-0fef-48ea-9197-8d3d8844547a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 590.338018] env[61852]: DEBUG nova.network.neutron [req-0632fbd4-5a6b-4d23-b27b-9ef204d206aa req-85a8a247-9a58-43f9-b51b-018e17d7b22c service nova] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 590.413499] env[61852]: DEBUG nova.network.neutron [req-0632fbd4-5a6b-4d23-b27b-9ef204d206aa req-85a8a247-9a58-43f9-b51b-018e17d7b22c service nova] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 590.663618] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 590.664588] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Starting heal instance info cache {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}}
[ 590.664588] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Rebuilding the list of instances to heal {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 590.738028] env[61852]: DEBUG nova.network.neutron [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 590.824030] env[61852]: DEBUG nova.network.neutron [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 590.917267] env[61852]: DEBUG oslo_concurrency.lockutils [req-0632fbd4-5a6b-4d23-b27b-9ef204d206aa req-85a8a247-9a58-43f9-b51b-018e17d7b22c service nova] Releasing lock "refresh_cache-db41ed39-0fef-48ea-9197-8d3d8844547a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 590.917650] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Acquired lock "refresh_cache-db41ed39-0fef-48ea-9197-8d3d8844547a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 590.917842] env[61852]: DEBUG nova.network.neutron [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 591.064832] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adede6ad-6b35-4be4-99f4-786d1cf770ec {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 591.072256] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92703c5a-5683-4224-858d-50e527d78118 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 591.103179] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13394bfa-0bc3-4e45-95bd-0d996a5b6029 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 591.110536] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57b1e9aa-d0e8-428b-b37e-b178182ac98a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 591.124064] env[61852]: DEBUG nova.compute.provider_tree [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 591.167851] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Skipping network cache update for instance because it is Building. {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 591.167996] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Skipping network cache update for instance because it is Building. {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 591.168166] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Skipping network cache update for instance because it is Building. {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 591.168305] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Skipping network cache update for instance because it is Building. {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 591.168450] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Skipping network cache update for instance because it is Building. {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 591.199280] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "refresh_cache-d75e131b-1933-4e1f-bcf1-62ed83779177" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 591.199437] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquired lock "refresh_cache-d75e131b-1933-4e1f-bcf1-62ed83779177" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 591.199589] env[61852]: DEBUG nova.network.neutron [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Forcefully refreshing network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}}
[ 591.199742] env[61852]: DEBUG nova.objects.instance [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lazy-loading 'info_cache' on Instance uuid d75e131b-1933-4e1f-bcf1-62ed83779177 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 591.330035] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Releasing lock "refresh_cache-a4736ffe-ad02-444d-bb6e-2cf4f70d64ee" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 591.330035] env[61852]: DEBUG nova.compute.manager [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}}
[ 591.330035] env[61852]: DEBUG nova.compute.manager [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 591.330035] env[61852]: DEBUG nova.network.neutron [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 591.346189] env[61852]: DEBUG nova.network.neutron [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 591.438110] env[61852]: DEBUG nova.network.neutron [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 591.510519] env[61852]: DEBUG nova.network.neutron [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 591.626327] env[61852]: DEBUG nova.scheduler.client.report [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 591.847853] env[61852]: DEBUG nova.network.neutron [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 591.859618] env[61852]: DEBUG nova.compute.manager [req-0dc798bf-ebbb-4329-9adf-d42606698ed6 req-d0f0c72b-cd41-4b44-814f-21de8cd41d4f service nova] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Received event network-vif-deleted-d2838531-972f-47ea-83ea-e4364d4030ee {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 592.013794] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Releasing lock "refresh_cache-db41ed39-0fef-48ea-9197-8d3d8844547a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 592.014272] env[61852]: DEBUG nova.compute.manager [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 592.014465] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 592.014779] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0d6ac6ce-f212-4052-b154-eaf58ec5f0aa {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 592.024786] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f4ec6f9-235a-4728-8995-f7dc62a9cf2a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 592.045846] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance db41ed39-0fef-48ea-9197-8d3d8844547a could not be found.
[ 592.046061] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 592.046249] env[61852]: INFO nova.compute.manager [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Took 0.03 seconds to destroy the instance on the hypervisor.
[ 592.046482] env[61852]: DEBUG oslo.service.loopingcall [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 592.046696] env[61852]: DEBUG nova.compute.manager [-] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 592.046793] env[61852]: DEBUG nova.network.neutron [-] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 592.061353] env[61852]: DEBUG nova.network.neutron [-] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 592.131643] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.911s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 592.134740] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.136s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 592.162664] env[61852]: INFO nova.scheduler.client.report [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Deleted allocations for instance d75e131b-1933-4e1f-bcf1-62ed83779177
[ 592.219669] env[61852]: DEBUG nova.network.neutron [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 592.350386] env[61852]: INFO nova.compute.manager [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] [instance: a4736ffe-ad02-444d-bb6e-2cf4f70d64ee] Took 1.02 seconds to deallocate network for instance.
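The lockutils records above follow a fixed pattern: "Acquiring", then "acquired ... waited Ns", then "released ... held Ns". A rough stdlib-only sketch of a wrapper that produces that accounting (the real oslo_concurrency.lockutils additionally supports fair locks, semaphores and inter-process file locks):

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}
    _registry_guard = threading.Lock()

    @contextmanager
    def lock(name, caller):
        with _registry_guard:
            lk = _locks.setdefault(name, threading.Lock())
        print(f'Acquiring lock "{name}" by "{caller}"')
        t0 = time.monotonic()
        lk.acquire()
        acquired = time.monotonic()
        print(f'Lock "{name}" acquired by "{caller}" :: waited {acquired - t0:.3f}s')
        try:
            yield
        finally:
            lk.release()
            held = time.monotonic() - acquired
            print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')

The long waits on "compute_resources" in this log (19 seconds and more) are contention between concurrent tempest builds on a single compute host rather than anything pathological.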
[ 592.564063] env[61852]: DEBUG nova.network.neutron [-] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 592.674989] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c4575db7-6e0c-470a-bd0a-ec3a19613537 tempest-ServersAdmin275Test-1040614293 tempest-ServersAdmin275Test-1040614293-project-member] Lock "d75e131b-1933-4e1f-bcf1-62ed83779177" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.419s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 592.804442] env[61852]: DEBUG nova.network.neutron [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 593.029165] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7927828d-d091-4dff-a600-0b751b51c27d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 593.037678] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-383499d4-e24b-45cc-847e-b41a660e5c7b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 593.069109] env[61852]: INFO nova.compute.manager [-] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Took 1.02 seconds to deallocate network for instance.
[ 593.071454] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7db03c9d-fa03-4948-8bfc-bfc2c88b30e3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 593.074338] env[61852]: DEBUG nova.compute.claims [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 593.074512] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 593.080040] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c228226-d5d1-4662-a162-e4a1f4ab4fb3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 593.093305] env[61852]: DEBUG nova.compute.provider_tree [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 593.309185] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Releasing lock "refresh_cache-d75e131b-1933-4e1f-bcf1-62ed83779177" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 593.309401] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Updated the network info_cache for instance {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}}
[ 593.309612] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 593.309770] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 593.309915] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 593.310082] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 593.310227] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 593.310407] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 593.310538] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61852) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}}
[ 593.310682] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 593.381677] env[61852]: INFO nova.scheduler.client.report [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Deleted allocations for instance a4736ffe-ad02-444d-bb6e-2cf4f70d64ee
[ 593.596051] env[61852]: DEBUG nova.scheduler.client.report [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 593.813843] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 593.890933] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4a0be746-9949-45f2-9697-3f4807e3aeef tempest-VolumesAssistedSnapshotsTest-136057235 tempest-VolumesAssistedSnapshotsTest-136057235-project-member] Lock "a4736ffe-ad02-444d-bb6e-2cf4f70d64ee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.372s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 594.104620] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.970s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
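The inventory dict in the report above turns into schedulable capacity as capacity = (total - reserved) * allocation_ratio; that is the standard Placement formula, checked here against the values logged for provider f818062c-7b17-4bd0-94af-192a674543c3:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:g} schedulable")
    # VCPU: 192 schedulable
    # MEMORY_MB: 196078 schedulable
    # DISK_GB: 400 schedulable

Note that max_unit still caps any single allocation (16 VCPUs, 65530 MB, 139 GB here) no matter how much headroom the ratio creates.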
[ 594.105316] env[61852]: ERROR nova.compute.manager [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2ad5fcff-857c-46b7-a1e1-23ee0f089406, please check neutron logs for more information.
[ 594.105316] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Traceback (most recent call last):
[ 594.105316] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 594.105316] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] self.driver.spawn(context, instance, image_meta,
[ 594.105316] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn
[ 594.105316] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 594.105316] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 594.105316] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] vm_ref = self.build_virtual_machine(instance,
[ 594.105316] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 594.105316] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] vif_infos = vmwarevif.get_vif_info(self._session,
[ 594.105316] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 594.105729] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] for vif in network_info:
[ 594.105729] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 594.105729] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] return self._sync_wrapper(fn, *args, **kwargs)
[ 594.105729] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 594.105729] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] self.wait()
[ 594.105729] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 594.105729] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] self[:] = self._gt.wait()
[ 594.105729] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 594.105729] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] return self._exit_event.wait()
[ 594.105729] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 594.105729] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] result = hub.switch()
[ 594.105729] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 594.105729] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] return self.greenlet.switch()
[ 594.106189] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 594.106189] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] result = function(*args, **kwargs)
[ 594.106189] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 594.106189] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] return func(*args, **kwargs)
[ 594.106189] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 594.106189] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] raise e
[ 594.106189] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 594.106189] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] nwinfo = self.network_api.allocate_for_instance(
[ 594.106189] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 594.106189] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] created_port_ids = self._update_ports_for_instance(
[ 594.106189] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 594.106189] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] with excutils.save_and_reraise_exception():
[ 594.106189] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 594.106615] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] self.force_reraise()
[ 594.106615] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 594.106615] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] raise self.value
[ 594.106615] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 594.106615] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] updated_port = self._update_port(
[ 594.106615] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 594.106615] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] _ensure_no_port_binding_failure(port)
[ 594.106615] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 594.106615] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] raise exception.PortBindingFailed(port_id=port['id'])
[ 594.106615] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] nova.exception.PortBindingFailed: Binding failed for port 2ad5fcff-857c-46b7-a1e1-23ee0f089406, please check neutron logs for more information.
[ 594.106615] env[61852]: ERROR nova.compute.manager [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6]
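Both PortBindingFailed tracebacks pass through oslo_utils/excutils.py:227 (__exit__) and :200 (force_reraise): the save_and_reraise_exception context manager lets _update_ports_for_instance clean up any ports it already created and then restores the original exception. A simplified sketch of the mechanism (the real oslo_utils.excutils class also supports reraise=False and logs secondary failures):

    import sys

    class save_and_reraise_exception:
        def __enter__(self):
            # Capture the exception currently being handled.
            self.type_, self.value, self.tb = sys.exc_info()
            return self

        def force_reraise(self):
            # The `raise self.value` frame visible in both tracebacks.
            raise self.value

        def __exit__(self, exc_type, exc_val, exc_tb):
            if exc_type is None:
                self.force_reraise()
            return False  # an exception raised by the cleanup itself wins

    # Shape of the call site in _update_ports_for_instance:
    #     try:
    #         updated_port = self._update_port(...)
    #     except Exception:
    #         with save_and_reraise_exception():
    #             ...delete any ports created so far...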
[ 594.106980] env[61852]: DEBUG nova.compute.utils [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Binding failed for port 2ad5fcff-857c-46b7-a1e1-23ee0f089406, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}}
[ 594.107283] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.619s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 594.108685] env[61852]: INFO nova.compute.claims [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 594.111709] env[61852]: DEBUG nova.compute.manager [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Build of instance d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6 was re-scheduled: Binding failed for port 2ad5fcff-857c-46b7-a1e1-23ee0f089406, please check neutron logs for more information. {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}}
[ 594.112244] env[61852]: DEBUG nova.compute.manager [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}}
[ 594.112475] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Acquiring lock "refresh_cache-d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 594.112620] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Acquired lock "refresh_cache-d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 594.112776] env[61852]: DEBUG nova.network.neutron [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 594.392877] env[61852]: DEBUG nova.compute.manager [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 594.565708] env[61852]: DEBUG nova.compute.manager [None req-b62b9326-421c-449a-b61c-68c0c664a0a5 tempest-ServerDiagnosticsV248Test-2011693375 tempest-ServerDiagnosticsV248Test-2011693375-project-admin] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 594.567364] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb26fe2c-3fb7-427c-bceb-9f0df4cd055f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 594.575088] env[61852]: INFO nova.compute.manager [None req-b62b9326-421c-449a-b61c-68c0c664a0a5 tempest-ServerDiagnosticsV248Test-2011693375 tempest-ServerDiagnosticsV248Test-2011693375-project-admin] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Retrieving diagnostics
[ 594.575842] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec0292e9-0146-40a4-8fdc-6a04973f705f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 594.629448] env[61852]: DEBUG nova.network.neutron [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 594.709519] env[61852]: DEBUG nova.network.neutron [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 594.918278] env[61852]: DEBUG oslo_concurrency.lockutils [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 595.212419] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Releasing lock "refresh_cache-d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 595.212762] env[61852]: DEBUG nova.compute.manager [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}}
[ 595.212881] env[61852]: DEBUG nova.compute.manager [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 595.213131] env[61852]: DEBUG nova.network.neutron [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 595.235745] env[61852]: DEBUG nova.network.neutron [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 595.485039] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2cdb819-6d4e-41ab-b9e9-8ac9f5c9f2a6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 595.492653] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ec4f98-00a4-4f09-b5bf-4fb51913febb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 595.524140] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb98733e-d9aa-48ad-be15-482c08d9b37a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 595.531209] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f08ff0e-1b52-4829-a950-00c5a3637ba1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 595.544387] env[61852]: DEBUG nova.compute.provider_tree [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 595.722120] env[61852]: DEBUG oslo_concurrency.lockutils [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Acquiring lock "4ce41dca-63c6-447d-9c0a-00f9966e0093" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 595.722412] env[61852]: DEBUG oslo_concurrency.lockutils [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Lock "4ce41dca-63c6-447d-9c0a-00f9966e0093" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 595.722624] env[61852]: DEBUG oslo_concurrency.lockutils [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Acquiring lock "4ce41dca-63c6-447d-9c0a-00f9966e0093-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 595.722802] env[61852]: DEBUG oslo_concurrency.lockutils [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Lock "4ce41dca-63c6-447d-9c0a-00f9966e0093-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 595.722965] env[61852]: DEBUG oslo_concurrency.lockutils [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Lock "4ce41dca-63c6-447d-9c0a-00f9966e0093-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 595.726286] env[61852]: INFO nova.compute.manager [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Terminating instance
[ 595.727848] env[61852]: DEBUG oslo_concurrency.lockutils [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Acquiring lock "refresh_cache-4ce41dca-63c6-447d-9c0a-00f9966e0093" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 595.728007] env[61852]: DEBUG oslo_concurrency.lockutils [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Acquired lock "refresh_cache-4ce41dca-63c6-447d-9c0a-00f9966e0093" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 595.728182] env[61852]: DEBUG nova.network.neutron [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 595.739328] env[61852]: DEBUG nova.network.neutron [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 596.047993] env[61852]: DEBUG nova.scheduler.client.report [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 596.241982] env[61852]: INFO nova.compute.manager [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] [instance: d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6] Took 1.03 seconds to deallocate network for instance.
[ 596.253125] env[61852]: DEBUG nova.network.neutron [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
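The claims records above bracket every build and teardown: instance_claim subtracts the flavor's resources from the host under the "compute_resources" lock, and a failed build later calls abort_instance_claim to hand them back, which is what happened to the PortBindingFailed instances in this log. A toy sketch of the shape of that accounting (the real logic lives in nova.compute.claims plus nova.compute.resource_tracker and also reconciles with Placement):

    import threading

    class ResourceTracker:
        def __init__(self, vcpus, ram_mb):
            self.lock = threading.Lock()   # the "compute_resources" lock
            self.free = {'vcpus': vcpus, 'ram_mb': ram_mb}

        def instance_claim(self, request):
            with self.lock:
                if any(self.free[k] < v for k, v in request.items()):
                    raise RuntimeError('insufficient resources')
                for k, v in request.items():
                    self.free[k] -= v
                return dict(request)  # the claim, kept so it can be aborted

        def abort_instance_claim(self, claim):
            # Runs when a build fails after claiming, e.g. on PortBindingFailed.
            with self.lock:
                for k, v in claim.items():
                    self.free[k] += v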
[ 596.313906] env[61852]: DEBUG nova.network.neutron [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 596.556609] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.449s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 596.557199] env[61852]: DEBUG nova.compute.manager [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 596.563360] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.382s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 596.816460] env[61852]: DEBUG oslo_concurrency.lockutils [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Releasing lock "refresh_cache-4ce41dca-63c6-447d-9c0a-00f9966e0093" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 596.818511] env[61852]: DEBUG nova.compute.manager [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 596.818511] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 596.818511] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f12da656-3069-4bc0-8658-91f7a247832c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 596.825732] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 596.826022] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8c08d021-b82d-44a5-9999-ee25af471350 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 596.832739] env[61852]: DEBUG oslo_vmware.api [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Waiting for the task: (returnval){
[ 596.832739] env[61852]: value = "task-1292691"
[ 596.832739] env[61852]: _type = "Task"
[ 596.832739] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 596.840626] env[61852]: DEBUG oslo_vmware.api [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Task: {'id': task-1292691, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 597.074866] env[61852]: DEBUG nova.compute.utils [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}}
[ 597.076971] env[61852]: DEBUG nova.compute.manager [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
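The oslo_vmware.api records above show the driver's task discipline: every mutating vCenter call (PowerOffVM_Task here, DeleteDatastoreFile_Task below) returns a Task managed object, and wait_for_task polls its info property until vCenter reports success or error. A condensed sketch of such a poll loop; session.get_task_info is an assumed helper, not the oslo.vmware API, which drives the same loop with a loopingcall timer instead of sleep:

    import time

    def wait_for_task(session, task_ref, interval=0.5):
        while True:
            info = session.get_task_info(task_ref)  # assumed helper
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                raise RuntimeError(info.error.localizedMessage)
            # 'queued' or 'running': report progress like the records above.
            print(f"Task: {info.key} progress is {info.progress or 0}%.")
            time.sleep(interval)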
[ 597.077152] env[61852]: DEBUG nova.network.neutron [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 597.171458] env[61852]: DEBUG nova.policy [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13cbeec3e12e477e96f4ac6b6215b837', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0da9e76d667448ea835ac662d226b275', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}}
[ 597.273766] env[61852]: INFO nova.scheduler.client.report [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Deleted allocations for instance d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6
[ 597.345768] env[61852]: DEBUG oslo_vmware.api [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Task: {'id': task-1292691, 'name': PowerOffVM_Task, 'duration_secs': 0.11224} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 597.345768] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 597.345768] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 597.345768] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4e842c94-99d8-477c-8dc7-5ed66488337f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 597.370107] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 597.370994] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 597.370994] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Deleting the datastore file [datastore1] 4ce41dca-63c6-447d-9c0a-00f9966e0093 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 597.370994] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-50a03727-be85-4ff3-af9d-f912c65c1a7c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 597.377895] env[61852]: DEBUG oslo_vmware.api [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Waiting for the task: (returnval){
[ 597.377895] env[61852]: value = "task-1292693"
[ 597.377895] env[61852]: _type = "Task"
[ 597.377895] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 597.385764] env[61852]: DEBUG oslo_vmware.api [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Task: {'id': task-1292693, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 597.423332] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c09bb82a-e70a-4f2d-8590-a43153a4daa9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 597.434136] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ec80867-bad3-4acc-a217-5a136a6de5cc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 597.467949] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-061dca4c-009f-44ec-b29c-c70b14ee15e5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 597.475202] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e798e86-5e92-41ca-a896-7883f16869de {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 597.488241] env[61852]: DEBUG nova.compute.provider_tree [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 597.577988] env[61852]: DEBUG nova.compute.manager [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 597.583853] env[61852]: DEBUG nova.network.neutron [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Successfully created port: db25fced-51cb-4f7f-9cd5-1a48f3cc1b3b {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 597.784172] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ae5b71e3-cfe9-497a-b026-ed3f79eb1689 tempest-ServersTestJSON-687101285 tempest-ServersTestJSON-687101285-project-member] Lock "d32e73d1-f7d9-4ee0-bbc3-44ba1abac9f6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.073s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 597.887935] env[61852]: DEBUG oslo_vmware.api [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Task: {'id': task-1292693, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.089452} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 597.888286] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 597.888510] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 597.888719] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 597.888925] env[61852]: INFO nova.compute.manager [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Took 1.07 seconds to destroy the instance on the hypervisor.
[ 597.889262] env[61852]: DEBUG oslo.service.loopingcall [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 597.889484] env[61852]: DEBUG nova.compute.manager [-] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 597.889615] env[61852]: DEBUG nova.network.neutron [-] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 597.908091] env[61852]: DEBUG nova.network.neutron [-] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 597.991150] env[61852]: DEBUG nova.scheduler.client.report [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 598.286306] env[61852]: DEBUG nova.compute.manager [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 598.293532] env[61852]: DEBUG nova.compute.manager [req-ce87d7ff-8b5f-4a72-8377-a60c605b483c req-8d24b6d6-a8f9-4a72-a782-08b4c2a902d1 service nova] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Received event network-changed-db25fced-51cb-4f7f-9cd5-1a48f3cc1b3b {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 598.294233] env[61852]: DEBUG nova.compute.manager [req-ce87d7ff-8b5f-4a72-8377-a60c605b483c req-8d24b6d6-a8f9-4a72-a782-08b4c2a902d1 service nova] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Refreshing instance network info cache due to event network-changed-db25fced-51cb-4f7f-9cd5-1a48f3cc1b3b.
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 598.295039] env[61852]: DEBUG oslo_concurrency.lockutils [req-ce87d7ff-8b5f-4a72-8377-a60c605b483c req-8d24b6d6-a8f9-4a72-a782-08b4c2a902d1 service nova] Acquiring lock "refresh_cache-2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.295571] env[61852]: DEBUG oslo_concurrency.lockutils [req-ce87d7ff-8b5f-4a72-8377-a60c605b483c req-8d24b6d6-a8f9-4a72-a782-08b4c2a902d1 service nova] Acquired lock "refresh_cache-2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.299431] env[61852]: DEBUG nova.network.neutron [req-ce87d7ff-8b5f-4a72-8377-a60c605b483c req-8d24b6d6-a8f9-4a72-a782-08b4c2a902d1 service nova] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Refreshing network info cache for port db25fced-51cb-4f7f-9cd5-1a48f3cc1b3b {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 598.410315] env[61852]: DEBUG nova.network.neutron [-] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.457377] env[61852]: ERROR nova.compute.manager [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port db25fced-51cb-4f7f-9cd5-1a48f3cc1b3b, please check neutron logs for more information. [ 598.457377] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 598.457377] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 598.457377] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 598.457377] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 598.457377] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 598.457377] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 598.457377] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 598.457377] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 598.457377] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 598.457377] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 598.457377] env[61852]: ERROR nova.compute.manager raise self.value [ 598.457377] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 598.457377] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 598.457377] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 598.457377] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 598.457887] env[61852]: ERROR nova.compute.manager File 
"/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 598.457887] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 598.457887] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port db25fced-51cb-4f7f-9cd5-1a48f3cc1b3b, please check neutron logs for more information. [ 598.457887] env[61852]: ERROR nova.compute.manager [ 598.457887] env[61852]: Traceback (most recent call last): [ 598.457887] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 598.457887] env[61852]: listener.cb(fileno) [ 598.457887] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 598.457887] env[61852]: result = function(*args, **kwargs) [ 598.457887] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 598.457887] env[61852]: return func(*args, **kwargs) [ 598.457887] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 598.457887] env[61852]: raise e [ 598.457887] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 598.457887] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 598.457887] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 598.457887] env[61852]: created_port_ids = self._update_ports_for_instance( [ 598.457887] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 598.457887] env[61852]: with excutils.save_and_reraise_exception(): [ 598.457887] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 598.457887] env[61852]: self.force_reraise() [ 598.457887] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 598.457887] env[61852]: raise self.value [ 598.457887] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 598.457887] env[61852]: updated_port = self._update_port( [ 598.457887] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 598.457887] env[61852]: _ensure_no_port_binding_failure(port) [ 598.457887] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 598.457887] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 598.458795] env[61852]: nova.exception.PortBindingFailed: Binding failed for port db25fced-51cb-4f7f-9cd5-1a48f3cc1b3b, please check neutron logs for more information. 
[ 598.458795] env[61852]: Removing descriptor: 19 [ 598.496445] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.933s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 598.497373] env[61852]: ERROR nova.compute.manager [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1175d90f-7ea1-4565-aa2c-fb93a0a3db16, please check neutron logs for more information. [ 598.497373] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Traceback (most recent call last): [ 598.497373] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 598.497373] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] self.driver.spawn(context, instance, image_meta, [ 598.497373] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 598.497373] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 598.497373] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 598.497373] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] vm_ref = self.build_virtual_machine(instance, [ 598.497373] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 598.497373] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] vif_infos = vmwarevif.get_vif_info(self._session, [ 598.497373] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 598.497713] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] for vif in network_info: [ 598.497713] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 598.497713] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] return self._sync_wrapper(fn, *args, **kwargs) [ 598.497713] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 598.497713] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] self.wait() [ 598.497713] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 598.497713] env[61852]: ERROR nova.compute.manager [instance: 
57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] self[:] = self._gt.wait() [ 598.497713] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 598.497713] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] return self._exit_event.wait() [ 598.497713] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 598.497713] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] current.throw(*self._exc) [ 598.497713] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 598.497713] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] result = function(*args, **kwargs) [ 598.498205] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 598.498205] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] return func(*args, **kwargs) [ 598.498205] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 598.498205] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] raise e [ 598.498205] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 598.498205] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] nwinfo = self.network_api.allocate_for_instance( [ 598.498205] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 598.498205] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] created_port_ids = self._update_ports_for_instance( [ 598.498205] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 598.498205] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] with excutils.save_and_reraise_exception(): [ 598.498205] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 598.498205] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] self.force_reraise() [ 598.498205] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 598.498569] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] raise self.value [ 598.498569] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 598.498569] env[61852]: 
ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] updated_port = self._update_port( [ 598.498569] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 598.498569] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] _ensure_no_port_binding_failure(port) [ 598.498569] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 598.498569] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] raise exception.PortBindingFailed(port_id=port['id']) [ 598.498569] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] nova.exception.PortBindingFailed: Binding failed for port 1175d90f-7ea1-4565-aa2c-fb93a0a3db16, please check neutron logs for more information. [ 598.498569] env[61852]: ERROR nova.compute.manager [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] [ 598.498569] env[61852]: DEBUG nova.compute.utils [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Binding failed for port 1175d90f-7ea1-4565-aa2c-fb93a0a3db16, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 598.500035] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.504s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.501770] env[61852]: INFO nova.compute.claims [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 598.504430] env[61852]: DEBUG nova.compute.manager [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Build of instance 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a was re-scheduled: Binding failed for port 1175d90f-7ea1-4565-aa2c-fb93a0a3db16, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 598.504941] env[61852]: DEBUG nova.compute.manager [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 598.505305] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Acquiring lock "refresh_cache-57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.505516] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Acquired lock "refresh_cache-57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.505749] env[61852]: DEBUG nova.network.neutron [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 598.587821] env[61852]: DEBUG nova.compute.manager [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 598.617288] env[61852]: DEBUG nova.virt.hardware [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 598.617596] env[61852]: DEBUG nova.virt.hardware [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 598.617759] env[61852]: DEBUG nova.virt.hardware [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 598.617935] env[61852]: DEBUG nova.virt.hardware [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 598.618422] env[61852]: DEBUG nova.virt.hardware [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 598.618763] env[61852]: DEBUG nova.virt.hardware [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 598.619080] env[61852]: DEBUG nova.virt.hardware [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 598.619362] env[61852]: DEBUG nova.virt.hardware [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 598.619600] env[61852]: DEBUG nova.virt.hardware [None 
req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 598.620033] env[61852]: DEBUG nova.virt.hardware [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 598.620783] env[61852]: DEBUG nova.virt.hardware [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 598.622337] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cfab802-aebc-41c6-95cd-dd35429cc5e2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.630753] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c495e19-9516-4f68-b5f3-d6a18c9b9118 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.645831] env[61852]: ERROR nova.compute.manager [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port db25fced-51cb-4f7f-9cd5-1a48f3cc1b3b, please check neutron logs for more information. 
[ 598.645831] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Traceback (most recent call last): [ 598.645831] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 598.645831] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] yield resources [ 598.645831] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 598.645831] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] self.driver.spawn(context, instance, image_meta, [ 598.645831] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 598.645831] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] self._vmops.spawn(context, instance, image_meta, injected_files, [ 598.645831] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 598.645831] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] vm_ref = self.build_virtual_machine(instance, [ 598.645831] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 598.646380] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] vif_infos = vmwarevif.get_vif_info(self._session, [ 598.646380] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 598.646380] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] for vif in network_info: [ 598.646380] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 598.646380] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] return self._sync_wrapper(fn, *args, **kwargs) [ 598.646380] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 598.646380] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] self.wait() [ 598.646380] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 598.646380] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] self[:] = self._gt.wait() [ 598.646380] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 598.646380] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] return self._exit_event.wait() [ 598.646380] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 598.646380] env[61852]: ERROR 
nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] current.throw(*self._exc) [ 598.646788] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 598.646788] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] result = function(*args, **kwargs) [ 598.646788] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 598.646788] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] return func(*args, **kwargs) [ 598.646788] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 598.646788] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] raise e [ 598.646788] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 598.646788] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] nwinfo = self.network_api.allocate_for_instance( [ 598.646788] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 598.646788] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] created_port_ids = self._update_ports_for_instance( [ 598.646788] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 598.646788] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] with excutils.save_and_reraise_exception(): [ 598.646788] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 598.647218] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] self.force_reraise() [ 598.647218] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 598.647218] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] raise self.value [ 598.647218] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 598.647218] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] updated_port = self._update_port( [ 598.647218] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 598.647218] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] _ensure_no_port_binding_failure(port) [ 598.647218] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
598.647218] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] raise exception.PortBindingFailed(port_id=port['id']) [ 598.647218] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] nova.exception.PortBindingFailed: Binding failed for port db25fced-51cb-4f7f-9cd5-1a48f3cc1b3b, please check neutron logs for more information. [ 598.647218] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] [ 598.647218] env[61852]: INFO nova.compute.manager [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Terminating instance [ 598.648515] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Acquiring lock "refresh_cache-2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.823227] env[61852]: DEBUG nova.network.neutron [req-ce87d7ff-8b5f-4a72-8377-a60c605b483c req-8d24b6d6-a8f9-4a72-a782-08b4c2a902d1 service nova] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 598.832813] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.890094] env[61852]: DEBUG nova.network.neutron [req-ce87d7ff-8b5f-4a72-8377-a60c605b483c req-8d24b6d6-a8f9-4a72-a782-08b4c2a902d1 service nova] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.916278] env[61852]: INFO nova.compute.manager [-] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Took 1.03 seconds to deallocate network for instance. [ 599.027154] env[61852]: DEBUG nova.network.neutron [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 599.102259] env[61852]: DEBUG nova.network.neutron [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.393011] env[61852]: DEBUG oslo_concurrency.lockutils [req-ce87d7ff-8b5f-4a72-8377-a60c605b483c req-8d24b6d6-a8f9-4a72-a782-08b4c2a902d1 service nova] Releasing lock "refresh_cache-2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.393435] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Acquired lock "refresh_cache-2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.393619] env[61852]: DEBUG nova.network.neutron [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 599.425648] env[61852]: DEBUG oslo_concurrency.lockutils [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.608553] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Releasing lock "refresh_cache-57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.608791] env[61852]: DEBUG nova.compute.manager [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 599.609063] env[61852]: DEBUG nova.compute.manager [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 599.609278] env[61852]: DEBUG nova.network.neutron [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 599.638858] env[61852]: DEBUG nova.network.neutron [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 599.863109] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58bd1708-bbc3-4a69-83c2-fa81b2b9782e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.870041] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aff48f7-0874-4e81-b862-a314752c02ec {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.900817] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce853bdc-c295-4f6a-8e1a-e1389728ca77 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.907982] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8822f45-7156-475d-a09b-394b00b84522 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.921281] env[61852]: DEBUG nova.compute.provider_tree [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 599.922829] env[61852]: DEBUG nova.network.neutron [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 600.012755] env[61852]: DEBUG nova.network.neutron [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.143295] env[61852]: DEBUG nova.network.neutron [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.365781] env[61852]: DEBUG nova.compute.manager [req-ab53b06e-8e5e-46c2-b4ad-15d28cc0dcaf req-127bc501-cfff-4316-b8fb-22d65355a1f5 service nova] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Received event network-vif-deleted-db25fced-51cb-4f7f-9cd5-1a48f3cc1b3b {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 600.426116] env[61852]: DEBUG nova.scheduler.client.report [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 600.520452] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Releasing lock "refresh_cache-2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.522037] env[61852]: DEBUG nova.compute.manager [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 600.522251] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 600.522551] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-36733266-fe32-493b-afec-2293d0034ea2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.534513] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72599bf4-a5bc-4ac0-9b33-dde6f033c8aa {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.564304] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce could not be found. [ 600.564527] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 600.564707] env[61852]: INFO nova.compute.manager [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Took 0.04 seconds to destroy the instance on the hypervisor. [ 600.564982] env[61852]: DEBUG oslo.service.loopingcall [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 600.565239] env[61852]: DEBUG nova.compute.manager [-] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 600.565313] env[61852]: DEBUG nova.network.neutron [-] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 600.587169] env[61852]: DEBUG nova.network.neutron [-] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 600.645845] env[61852]: INFO nova.compute.manager [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] [instance: 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a] Took 1.04 seconds to deallocate network for instance. 
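The "Waiting for function ..._deallocate_network_with_retries to return" records above come from oslo.service's looping-call machinery (the loopingcall.py:435 frames): a wrapped function is invoked repeatedly until it raises LoopingCallDone, whose retvalue is handed back to the waiter. A minimal sketch of that mechanism under those assumptions (toy retry counter and a fixed interval; Nova's actual retry/backoff policy may differ):

    from oslo_service import loopingcall

    state = {"attempts": 0}

    def _deallocate_with_retries():
        # Toy stand-in: pretend the first two attempts hit a retryable error.
        state["attempts"] += 1
        if state["attempts"] < 3:
            return  # the loop sleeps `interval` seconds and calls again
        raise loopingcall.LoopingCallDone(retvalue="deallocated")

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    result = timer.start(interval=0.1).wait()  # blocks until LoopingCallDone
    print(result)  # -> deallocated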
[ 600.933952] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.434s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.934531] env[61852]: DEBUG nova.compute.manager [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 600.938324] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.415s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.939048] env[61852]: INFO nova.compute.claims [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 601.089947] env[61852]: DEBUG nova.network.neutron [-] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.446368] env[61852]: DEBUG nova.compute.utils [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 601.451113] env[61852]: DEBUG nova.compute.manager [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 601.451376] env[61852]: DEBUG nova.network.neutron [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 601.516568] env[61852]: DEBUG nova.policy [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5676025bc93e41a6957800c80743c534', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f0dfb1aa267b4cf49f90a1d161a5780b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 601.595168] env[61852]: INFO nova.compute.manager [-] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Took 1.03 seconds to deallocate network for instance. [ 601.597584] env[61852]: DEBUG nova.compute.claims [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 601.597799] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.688074] env[61852]: INFO nova.scheduler.client.report [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Deleted allocations for instance 57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a [ 601.956204] env[61852]: DEBUG nova.compute.manager [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 601.971746] env[61852]: DEBUG nova.network.neutron [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Successfully created port: d0905238-564c-4155-9c65-582d6c76eed1 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 602.197795] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4ec930a8-0a76-4a59-bded-aa78d16f9f1e tempest-ListServerFiltersTestJSON-259442648 tempest-ListServerFiltersTestJSON-259442648-project-member] Lock "57a53e9f-1ca6-4f1e-9ac1-bbf0799a5c1a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.698s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 602.327246] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f2ba38c-dc31-4358-ba03-6afb9763eb37 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.334945] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bcbf8f4-0d1a-4199-bd92-235501af025f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.366780] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4258a6c4-3a54-4d47-8833-13c210e5c22f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.374190] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2000650-bb59-4acf-80cd-900be880533a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.388022] env[61852]: DEBUG nova.compute.provider_tree [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 602.701186] env[61852]: DEBUG nova.compute.manager [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 602.731703] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Acquiring lock "694889e8-200e-454c-9e87-60521dd044d9" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.731980] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Lock "694889e8-200e-454c-9e87-60521dd044d9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.891473] env[61852]: DEBUG nova.scheduler.client.report [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 602.928759] env[61852]: DEBUG nova.compute.manager [req-21df691a-0bd5-4322-8d33-21c860b5bdde req-ed268a0b-4f9b-4505-a8a3-e22e005a347e service nova] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Received event network-changed-d0905238-564c-4155-9c65-582d6c76eed1 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 602.929058] env[61852]: DEBUG nova.compute.manager [req-21df691a-0bd5-4322-8d33-21c860b5bdde req-ed268a0b-4f9b-4505-a8a3-e22e005a347e service nova] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Refreshing instance network info cache due to event network-changed-d0905238-564c-4155-9c65-582d6c76eed1. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 602.929193] env[61852]: DEBUG oslo_concurrency.lockutils [req-21df691a-0bd5-4322-8d33-21c860b5bdde req-ed268a0b-4f9b-4505-a8a3-e22e005a347e service nova] Acquiring lock "refresh_cache-0f6293bd-3096-4deb-a388-9a3e8b2e5926" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 602.929411] env[61852]: DEBUG oslo_concurrency.lockutils [req-21df691a-0bd5-4322-8d33-21c860b5bdde req-ed268a0b-4f9b-4505-a8a3-e22e005a347e service nova] Acquired lock "refresh_cache-0f6293bd-3096-4deb-a388-9a3e8b2e5926" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.929481] env[61852]: DEBUG nova.network.neutron [req-21df691a-0bd5-4322-8d33-21c860b5bdde req-ed268a0b-4f9b-4505-a8a3-e22e005a347e service nova] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Refreshing network info cache for port d0905238-564c-4155-9c65-582d6c76eed1 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 602.948953] env[61852]: ERROR nova.compute.manager [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d0905238-564c-4155-9c65-582d6c76eed1, please check neutron logs for more information. [ 602.948953] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 602.948953] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 602.948953] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 602.948953] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 602.948953] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 602.948953] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 602.948953] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 602.948953] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 602.948953] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 602.948953] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 602.948953] env[61852]: ERROR nova.compute.manager raise self.value [ 602.948953] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 602.948953] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 602.948953] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 602.948953] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 602.949544] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 602.949544] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 602.949544] env[61852]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port d0905238-564c-4155-9c65-582d6c76eed1, please check neutron logs for more information. [ 602.949544] env[61852]: ERROR nova.compute.manager [ 602.949544] env[61852]: Traceback (most recent call last): [ 602.949544] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 602.949544] env[61852]: listener.cb(fileno) [ 602.949544] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 602.949544] env[61852]: result = function(*args, **kwargs) [ 602.949544] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 602.949544] env[61852]: return func(*args, **kwargs) [ 602.949544] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 602.949544] env[61852]: raise e [ 602.949544] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 602.949544] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 602.949544] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 602.949544] env[61852]: created_port_ids = self._update_ports_for_instance( [ 602.949544] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 602.949544] env[61852]: with excutils.save_and_reraise_exception(): [ 602.949544] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 602.949544] env[61852]: self.force_reraise() [ 602.949544] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 602.949544] env[61852]: raise self.value [ 602.949544] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 602.949544] env[61852]: updated_port = self._update_port( [ 602.949544] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 602.949544] env[61852]: _ensure_no_port_binding_failure(port) [ 602.949544] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 602.949544] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 602.950537] env[61852]: nova.exception.PortBindingFailed: Binding failed for port d0905238-564c-4155-9c65-582d6c76eed1, please check neutron logs for more information. [ 602.950537] env[61852]: Removing descriptor: 19 [ 602.969408] env[61852]: DEBUG nova.compute.manager [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 602.995513] env[61852]: DEBUG nova.virt.hardware [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 602.995762] env[61852]: DEBUG nova.virt.hardware [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 602.995913] env[61852]: DEBUG nova.virt.hardware [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 602.996110] env[61852]: DEBUG nova.virt.hardware [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 602.996258] env[61852]: DEBUG nova.virt.hardware [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 602.996401] env[61852]: DEBUG nova.virt.hardware [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 602.996597] env[61852]: DEBUG nova.virt.hardware [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 602.996748] env[61852]: DEBUG nova.virt.hardware [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 602.996905] env[61852]: DEBUG nova.virt.hardware [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 602.997142] env[61852]: DEBUG nova.virt.hardware [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 602.997336] env[61852]: DEBUG nova.virt.hardware [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 602.998470] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-583d7fa8-8d61-4a74-9997-561ff97d074a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.006848] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9407fe1-7be5-4723-a918-96ecdb161324 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.026044] env[61852]: ERROR nova.compute.manager [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d0905238-564c-4155-9c65-582d6c76eed1, please check neutron logs for more information. 
[ 603.026044] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Traceback (most recent call last): [ 603.026044] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 603.026044] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] yield resources [ 603.026044] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 603.026044] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] self.driver.spawn(context, instance, image_meta, [ 603.026044] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 603.026044] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] self._vmops.spawn(context, instance, image_meta, injected_files, [ 603.026044] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 603.026044] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] vm_ref = self.build_virtual_machine(instance, [ 603.026044] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 603.026443] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] vif_infos = vmwarevif.get_vif_info(self._session, [ 603.026443] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 603.026443] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] for vif in network_info: [ 603.026443] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 603.026443] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] return self._sync_wrapper(fn, *args, **kwargs) [ 603.026443] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 603.026443] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] self.wait() [ 603.026443] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 603.026443] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] self[:] = self._gt.wait() [ 603.026443] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 603.026443] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] return self._exit_event.wait() [ 603.026443] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 603.026443] env[61852]: ERROR 
nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] current.throw(*self._exc) [ 603.026835] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 603.026835] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] result = function(*args, **kwargs) [ 603.026835] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 603.026835] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] return func(*args, **kwargs) [ 603.026835] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 603.026835] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] raise e [ 603.026835] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 603.026835] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] nwinfo = self.network_api.allocate_for_instance( [ 603.026835] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 603.026835] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] created_port_ids = self._update_ports_for_instance( [ 603.026835] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 603.026835] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] with excutils.save_and_reraise_exception(): [ 603.026835] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 603.027296] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] self.force_reraise() [ 603.027296] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 603.027296] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] raise self.value [ 603.027296] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 603.027296] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] updated_port = self._update_port( [ 603.027296] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 603.027296] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] _ensure_no_port_binding_failure(port) [ 603.027296] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
603.027296] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] raise exception.PortBindingFailed(port_id=port['id']) [ 603.027296] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] nova.exception.PortBindingFailed: Binding failed for port d0905238-564c-4155-9c65-582d6c76eed1, please check neutron logs for more information. [ 603.027296] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] [ 603.027296] env[61852]: INFO nova.compute.manager [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Terminating instance [ 603.028277] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Acquiring lock "refresh_cache-0f6293bd-3096-4deb-a388-9a3e8b2e5926" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 603.222718] env[61852]: DEBUG oslo_concurrency.lockutils [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.399454] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.462s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 603.399980] env[61852]: DEBUG nova.compute.manager [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 603.402701] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.702s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.404153] env[61852]: INFO nova.compute.claims [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 603.451996] env[61852]: DEBUG nova.network.neutron [req-21df691a-0bd5-4322-8d33-21c860b5bdde req-ed268a0b-4f9b-4505-a8a3-e22e005a347e service nova] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 603.525014] env[61852]: DEBUG nova.network.neutron [req-21df691a-0bd5-4322-8d33-21c860b5bdde req-ed268a0b-4f9b-4505-a8a3-e22e005a347e service nova] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.910861] env[61852]: DEBUG nova.compute.utils [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 603.912536] env[61852]: DEBUG nova.compute.manager [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 603.912727] env[61852]: DEBUG nova.network.neutron [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 603.982847] env[61852]: DEBUG nova.policy [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5676025bc93e41a6957800c80743c534', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f0dfb1aa267b4cf49f90a1d161a5780b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 604.027774] env[61852]: DEBUG oslo_concurrency.lockutils [req-21df691a-0bd5-4322-8d33-21c860b5bdde req-ed268a0b-4f9b-4505-a8a3-e22e005a347e service nova] Releasing lock "refresh_cache-0f6293bd-3096-4deb-a388-9a3e8b2e5926" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 604.028231] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Acquired lock "refresh_cache-0f6293bd-3096-4deb-a388-9a3e8b2e5926" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.028424] env[61852]: DEBUG nova.network.neutron [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 604.320054] env[61852]: DEBUG nova.network.neutron [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 
tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Successfully created port: fb2772c4-b414-4d3e-ad70-0798bb712b3d {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 604.419204] env[61852]: DEBUG nova.compute.manager [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 604.560595] env[61852]: DEBUG nova.network.neutron [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 604.737352] env[61852]: DEBUG nova.network.neutron [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.833228] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f275fe86-7135-43cd-9c04-9c45bc0b3cb0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.840655] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29e6ea9f-2688-47c8-bf71-b34fca34c40f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.871609] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b38cfd47-2ea2-4ded-82f1-0c45a867f02f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.879284] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c265e90-605d-43cb-8d36-c881d01d1e83 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.893711] env[61852]: DEBUG nova.compute.provider_tree [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 604.955885] env[61852]: DEBUG nova.compute.manager [req-ceafebac-f1f1-45e4-aeb2-3c258c416a34 req-2652d8db-4267-4bff-ae5e-e9b925b5d282 service nova] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Received event network-vif-deleted-d0905238-564c-4155-9c65-582d6c76eed1 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 605.246713] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Releasing lock "refresh_cache-0f6293bd-3096-4deb-a388-9a3e8b2e5926" 
{{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 605.246713] env[61852]: DEBUG nova.compute.manager [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 605.246713] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 605.246713] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-61e1c945-81ad-4992-8508-ca938fc163f2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.255398] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f90a1ce7-30d2-41d4-92fc-d993971c5e0e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.285140] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0f6293bd-3096-4deb-a388-9a3e8b2e5926 could not be found. [ 605.285631] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 605.285942] env[61852]: INFO nova.compute.manager [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Took 0.04 seconds to destroy the instance on the hypervisor. [ 605.286325] env[61852]: DEBUG oslo.service.loopingcall [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 605.286678] env[61852]: DEBUG nova.compute.manager [-] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 605.286910] env[61852]: DEBUG nova.network.neutron [-] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 605.311023] env[61852]: DEBUG nova.network.neutron [-] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 605.400018] env[61852]: DEBUG nova.scheduler.client.report [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 605.431531] env[61852]: DEBUG nova.compute.manager [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 605.457095] env[61852]: DEBUG nova.virt.hardware [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 605.457095] env[61852]: DEBUG nova.virt.hardware [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 605.457095] env[61852]: DEBUG nova.virt.hardware [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 605.457532] env[61852]: DEBUG nova.virt.hardware [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 605.457532] env[61852]: DEBUG nova.virt.hardware [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 605.457532] env[61852]: DEBUG nova.virt.hardware [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 605.458330] env[61852]: DEBUG nova.virt.hardware [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 605.458648] env[61852]: DEBUG nova.virt.hardware [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 605.459984] env[61852]: DEBUG nova.virt.hardware [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 605.459984] env[61852]: DEBUG nova.virt.hardware [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 605.459984] env[61852]: DEBUG nova.virt.hardware [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 605.461394] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05628ee1-1a8a-416e-afdf-135ebcd76524 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.470821] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42077ad8-428e-458e-bf45-8b0a6df985a9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.503388] env[61852]: ERROR nova.compute.manager [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port fb2772c4-b414-4d3e-ad70-0798bb712b3d, please check neutron logs for more information. 
[ 605.503388] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 605.503388] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 605.503388] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 605.503388] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 605.503388] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 605.503388] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 605.503388] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 605.503388] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 605.503388] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 605.503388] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 605.503388] env[61852]: ERROR nova.compute.manager raise self.value [ 605.503388] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 605.503388] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 605.503388] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 605.503388] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 605.504077] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 605.504077] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 605.504077] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port fb2772c4-b414-4d3e-ad70-0798bb712b3d, please check neutron logs for more information. 
[ 605.504077] env[61852]: ERROR nova.compute.manager [ 605.504077] env[61852]: Traceback (most recent call last): [ 605.504077] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 605.504077] env[61852]: listener.cb(fileno) [ 605.504077] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 605.504077] env[61852]: result = function(*args, **kwargs) [ 605.504077] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 605.504077] env[61852]: return func(*args, **kwargs) [ 605.504077] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 605.504077] env[61852]: raise e [ 605.504077] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 605.504077] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 605.504077] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 605.504077] env[61852]: created_port_ids = self._update_ports_for_instance( [ 605.504077] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 605.504077] env[61852]: with excutils.save_and_reraise_exception(): [ 605.504077] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 605.504077] env[61852]: self.force_reraise() [ 605.504077] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 605.504077] env[61852]: raise self.value [ 605.504077] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 605.504077] env[61852]: updated_port = self._update_port( [ 605.504077] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 605.504077] env[61852]: _ensure_no_port_binding_failure(port) [ 605.504077] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 605.504077] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 605.505223] env[61852]: nova.exception.PortBindingFailed: Binding failed for port fb2772c4-b414-4d3e-ad70-0798bb712b3d, please check neutron logs for more information. [ 605.505223] env[61852]: Removing descriptor: 19 [ 605.505223] env[61852]: ERROR nova.compute.manager [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port fb2772c4-b414-4d3e-ad70-0798bb712b3d, please check neutron logs for more information. 
[ 605.505223] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Traceback (most recent call last): [ 605.505223] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 605.505223] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] yield resources [ 605.505223] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 605.505223] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] self.driver.spawn(context, instance, image_meta, [ 605.505223] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 605.505223] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 605.505223] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 605.505223] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] vm_ref = self.build_virtual_machine(instance, [ 605.505568] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 605.505568] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] vif_infos = vmwarevif.get_vif_info(self._session, [ 605.505568] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 605.505568] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] for vif in network_info: [ 605.505568] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 605.505568] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] return self._sync_wrapper(fn, *args, **kwargs) [ 605.505568] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 605.505568] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] self.wait() [ 605.505568] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 605.505568] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] self[:] = self._gt.wait() [ 605.505568] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 605.505568] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] return self._exit_event.wait() [ 605.505568] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 605.505814] env[61852]: ERROR 
nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] result = hub.switch() [ 605.505814] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 605.505814] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] return self.greenlet.switch() [ 605.505814] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 605.505814] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] result = function(*args, **kwargs) [ 605.505814] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 605.505814] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] return func(*args, **kwargs) [ 605.505814] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 605.505814] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] raise e [ 605.505814] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 605.505814] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] nwinfo = self.network_api.allocate_for_instance( [ 605.505814] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 605.505814] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] created_port_ids = self._update_ports_for_instance( [ 605.506814] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 605.506814] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] with excutils.save_and_reraise_exception(): [ 605.506814] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 605.506814] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] self.force_reraise() [ 605.506814] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 605.506814] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] raise self.value [ 605.506814] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 605.506814] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] updated_port = self._update_port( [ 605.506814] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 605.506814] 
env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] _ensure_no_port_binding_failure(port) [ 605.506814] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 605.506814] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] raise exception.PortBindingFailed(port_id=port['id']) [ 605.507258] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] nova.exception.PortBindingFailed: Binding failed for port fb2772c4-b414-4d3e-ad70-0798bb712b3d, please check neutron logs for more information. [ 605.507258] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] [ 605.507258] env[61852]: INFO nova.compute.manager [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Terminating instance [ 605.507258] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Acquiring lock "refresh_cache-068ced45-4c50-4cfd-bd94-fa1dad29e5b5" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 605.507258] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Acquired lock "refresh_cache-068ced45-4c50-4cfd-bd94-fa1dad29e5b5" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.507258] env[61852]: DEBUG nova.network.neutron [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 605.813210] env[61852]: DEBUG nova.network.neutron [-] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 605.902620] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.500s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 605.903011] env[61852]: DEBUG nova.compute.manager [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 605.905657] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.592s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 606.036329] env[61852]: DEBUG nova.network.neutron [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 606.178651] env[61852]: DEBUG nova.network.neutron [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.316784] env[61852]: INFO nova.compute.manager [-] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Took 1.03 seconds to deallocate network for instance. [ 606.320555] env[61852]: DEBUG nova.compute.claims [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 606.320733] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.413375] env[61852]: DEBUG nova.compute.utils [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 606.419659] env[61852]: DEBUG nova.compute.manager [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 606.420508] env[61852]: DEBUG nova.network.neutron [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 606.514640] env[61852]: DEBUG nova.policy [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6271ea2f00a546cd8ed4d556f530ab5b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dfa77202117a445381073c35c04e557e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 606.684799] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Releasing lock "refresh_cache-068ced45-4c50-4cfd-bd94-fa1dad29e5b5" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 606.685689] env[61852]: DEBUG nova.compute.manager [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 606.685896] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 606.686413] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9c700eae-2558-4cc7-b32c-8e5c256c3c85 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.700968] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3002e0be-273e-4ade-92ca-dd0ceceed942 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.725098] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 068ced45-4c50-4cfd-bd94-fa1dad29e5b5 could not be found. 
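The PortBindingFailed tracebacks above all bottom out in _ensure_no_port_binding_failure at nova/network/neutron.py:294. Below is a minimal sketch of the guard that frame implies; the binding:vif_type key and the 'binding_failed' sentinel are assumptions based on Neutron's port-binding extension and do not appear anywhere in this log.

# Sketch only: reconstructs the check named in the tracebacks above.
# The 'binding:vif_type' key and 'binding_failed' value are assumptions,
# not taken from this log.
VIF_TYPE_BINDING_FAILED = 'binding_failed'

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__('Binding failed for port %s, please check neutron '
                         'logs for more information.' % port_id)

def _ensure_no_port_binding_failure(port):
    # Neutron accepts the port update even when binding fails, so the
    # failure has to be detected from the returned port body rather than
    # from an HTTP error.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

Calling this with a port dict whose binding:vif_type is 'binding_failed' reproduces the exact message logged above for port fb2772c4-b414-4d3e-ad70-0798bb712b3d.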
[ 606.725342] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 606.725522] env[61852]: INFO nova.compute.manager [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Took 0.04 seconds to destroy the instance on the hypervisor. [ 606.725769] env[61852]: DEBUG oslo.service.loopingcall [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 606.728664] env[61852]: DEBUG nova.compute.manager [-] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 606.728770] env[61852]: DEBUG nova.network.neutron [-] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 606.749988] env[61852]: DEBUG nova.network.neutron [-] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 606.838035] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4befae8-7443-4f80-9f6b-bfd4da652ba4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.845439] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51863558-fbe1-4875-b4bf-a06a97ea9975 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.877785] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1230c28b-610c-4c6d-9654-d0986200e929 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.886529] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d239ef1-31d4-4cec-93f2-9525d34fc9df {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.900172] env[61852]: DEBUG nova.compute.provider_tree [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 606.920058] env[61852]: DEBUG nova.compute.manager [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Start building block device
mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 607.010399] env[61852]: DEBUG nova.compute.manager [req-9a24b103-5a58-4eab-8b1e-0d0bf57f82b3 req-7fc77775-80f9-4dc6-92d1-4fc0bcad0e1c service nova] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Received event network-changed-fb2772c4-b414-4d3e-ad70-0798bb712b3d {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 607.010399] env[61852]: DEBUG nova.compute.manager [req-9a24b103-5a58-4eab-8b1e-0d0bf57f82b3 req-7fc77775-80f9-4dc6-92d1-4fc0bcad0e1c service nova] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Refreshing instance network info cache due to event network-changed-fb2772c4-b414-4d3e-ad70-0798bb712b3d. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 607.010399] env[61852]: DEBUG oslo_concurrency.lockutils [req-9a24b103-5a58-4eab-8b1e-0d0bf57f82b3 req-7fc77775-80f9-4dc6-92d1-4fc0bcad0e1c service nova] Acquiring lock "refresh_cache-068ced45-4c50-4cfd-bd94-fa1dad29e5b5" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.010399] env[61852]: DEBUG oslo_concurrency.lockutils [req-9a24b103-5a58-4eab-8b1e-0d0bf57f82b3 req-7fc77775-80f9-4dc6-92d1-4fc0bcad0e1c service nova] Acquired lock "refresh_cache-068ced45-4c50-4cfd-bd94-fa1dad29e5b5" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.010399] env[61852]: DEBUG nova.network.neutron [req-9a24b103-5a58-4eab-8b1e-0d0bf57f82b3 req-7fc77775-80f9-4dc6-92d1-4fc0bcad0e1c service nova] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Refreshing network info cache for port fb2772c4-b414-4d3e-ad70-0798bb712b3d {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 607.045672] env[61852]: DEBUG nova.network.neutron [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Successfully created port: 70b22784-bf3f-4e3e-a446-065ef48c6697 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 607.252746] env[61852]: DEBUG nova.network.neutron [-] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.407294] env[61852]: DEBUG nova.scheduler.client.report [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 607.538972] env[61852]: DEBUG nova.network.neutron [req-9a24b103-5a58-4eab-8b1e-0d0bf57f82b3 req-7fc77775-80f9-4dc6-92d1-4fc0bcad0e1c service nova] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 607.663342] env[61852]: DEBUG nova.network.neutron [req-9a24b103-5a58-4eab-8b1e-0d0bf57f82b3 req-7fc77775-80f9-4dc6-92d1-4fc0bcad0e1c service nova] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.756612] env[61852]: INFO nova.compute.manager [-] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Took 1.03 seconds to deallocate network for instance. [ 607.760033] env[61852]: DEBUG nova.compute.claims [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 607.760033] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 607.910731] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.005s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 607.911424] env[61852]: ERROR nova.compute.manager [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port df6c589a-e0da-470c-8c06-ce35356367e9, please check neutron logs for more information. 
[ 607.911424] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Traceback (most recent call last): [ 607.911424] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 607.911424] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] self.driver.spawn(context, instance, image_meta, [ 607.911424] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 607.911424] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] self._vmops.spawn(context, instance, image_meta, injected_files, [ 607.911424] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 607.911424] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] vm_ref = self.build_virtual_machine(instance, [ 607.911424] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 607.911424] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] vif_infos = vmwarevif.get_vif_info(self._session, [ 607.911424] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 607.911682] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] for vif in network_info: [ 607.911682] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 607.911682] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] return self._sync_wrapper(fn, *args, **kwargs) [ 607.911682] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 607.911682] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] self.wait() [ 607.911682] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 607.911682] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] self[:] = self._gt.wait() [ 607.911682] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 607.911682] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] return self._exit_event.wait() [ 607.911682] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 607.911682] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] current.throw(*self._exc) [ 607.911682] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
607.911682] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] result = function(*args, **kwargs) [ 607.911971] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 607.911971] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] return func(*args, **kwargs) [ 607.911971] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 607.911971] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] raise e [ 607.911971] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 607.911971] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] nwinfo = self.network_api.allocate_for_instance( [ 607.911971] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 607.911971] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] created_port_ids = self._update_ports_for_instance( [ 607.911971] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 607.911971] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] with excutils.save_and_reraise_exception(): [ 607.911971] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 607.911971] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] self.force_reraise() [ 607.911971] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 607.912283] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] raise self.value [ 607.912283] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 607.912283] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] updated_port = self._update_port( [ 607.912283] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 607.912283] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] _ensure_no_port_binding_failure(port) [ 607.912283] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 607.912283] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] raise exception.PortBindingFailed(port_id=port['id']) [ 607.912283] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] nova.exception.PortBindingFailed: Binding failed for 
port df6c589a-e0da-470c-8c06-ce35356367e9, please check neutron logs for more information. [ 607.912283] env[61852]: ERROR nova.compute.manager [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] [ 607.912283] env[61852]: DEBUG nova.compute.utils [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Binding failed for port df6c589a-e0da-470c-8c06-ce35356367e9, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 607.915732] env[61852]: DEBUG nova.compute.manager [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Build of instance e9a7c08d-e021-43d0-b757-6ad0174b4648 was re-scheduled: Binding failed for port df6c589a-e0da-470c-8c06-ce35356367e9, please check neutron logs for more information. {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 607.916284] env[61852]: DEBUG nova.compute.manager [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 607.916926] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Acquiring lock "refresh_cache-e9a7c08d-e021-43d0-b757-6ad0174b4648" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.916926] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Acquired lock "refresh_cache-e9a7c08d-e021-43d0-b757-6ad0174b4648" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.916926] env[61852]: DEBUG nova.network.neutron [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 607.918079] env[61852]: DEBUG oslo_concurrency.lockutils [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.853s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 607.935747] env[61852]: DEBUG nova.compute.manager [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 607.974245] env[61852]: DEBUG nova.virt.hardware [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:20:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='92c15cf7-8791-4ef2-b415-57bf08908c00',id=30,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1773871421',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=<?>,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-15T17:18:24Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 607.974245] env[61852]: DEBUG nova.virt.hardware [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 607.974245] env[61852]: DEBUG nova.virt.hardware [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 607.974374] env[61852]: DEBUG nova.virt.hardware [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 607.974374] env[61852]: DEBUG nova.virt.hardware [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 607.974374] env[61852]: DEBUG nova.virt.hardware [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 607.974374] env[61852]: DEBUG nova.virt.hardware [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 607.974374] env[61852]: DEBUG nova.virt.hardware [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 607.978056] env[61852]: DEBUG
nova.virt.hardware [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 607.978056] env[61852]: DEBUG nova.virt.hardware [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 607.978056] env[61852]: DEBUG nova.virt.hardware [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 607.979375] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83701fc1-9d31-4d20-9a1f-fd15b2da8d19 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.989874] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12c9030c-3a26-4273-a285-54192d8e2286 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.168063] env[61852]: DEBUG oslo_concurrency.lockutils [req-9a24b103-5a58-4eab-8b1e-0d0bf57f82b3 req-7fc77775-80f9-4dc6-92d1-4fc0bcad0e1c service nova] Releasing lock "refresh_cache-068ced45-4c50-4cfd-bd94-fa1dad29e5b5" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.168063] env[61852]: DEBUG nova.compute.manager [req-9a24b103-5a58-4eab-8b1e-0d0bf57f82b3 req-7fc77775-80f9-4dc6-92d1-4fc0bcad0e1c service nova] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Received event network-vif-deleted-fb2772c4-b414-4d3e-ad70-0798bb712b3d {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 608.226103] env[61852]: ERROR nova.compute.manager [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 70b22784-bf3f-4e3e-a446-065ef48c6697, please check neutron logs for more information. 
[ 608.226103] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 608.226103] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 608.226103] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 608.226103] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 608.226103] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 608.226103] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 608.226103] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 608.226103] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 608.226103] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 608.226103] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 608.226103] env[61852]: ERROR nova.compute.manager raise self.value [ 608.226103] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 608.226103] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 608.226103] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 608.226103] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 608.226619] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 608.226619] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 608.226619] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 70b22784-bf3f-4e3e-a446-065ef48c6697, please check neutron logs for more information. 
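The same PortBindingFailed is reported more than once above because _allocate_network_async runs in a spawned eventlet greenthread: the exception is raised and logged inside the worker, and it is raised again wherever the spawn path later calls wait() on the async network_info wrapper (the self._gt.wait() frame at nova/network/model.py:635). A self-contained sketch of that deferral, with an illustrative stand-in failure:

import eventlet

def allocate_network():
    # Stand-in for _allocate_network_async: the failure happens here,
    # inside the worker greenthread, not in the caller.
    raise RuntimeError('Binding failed for port ...')

gt = eventlet.spawn(allocate_network)
try:
    gt.wait()  # analogous to self._gt.wait() in nova/network/model.py:635
except RuntimeError as exc:
    # The worker's exception re-raises in the waiting caller, which is why
    # the identical traceback frames reappear in the spawn-side report.
    print('surfaced at wait():', exc)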
[ 608.226619] env[61852]: ERROR nova.compute.manager [ 608.226619] env[61852]: Traceback (most recent call last): [ 608.226619] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 608.226619] env[61852]: listener.cb(fileno) [ 608.226619] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 608.226619] env[61852]: result = function(*args, **kwargs) [ 608.226619] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 608.226619] env[61852]: return func(*args, **kwargs) [ 608.226619] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 608.226619] env[61852]: raise e [ 608.226619] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 608.226619] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 608.226619] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 608.226619] env[61852]: created_port_ids = self._update_ports_for_instance( [ 608.226619] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 608.226619] env[61852]: with excutils.save_and_reraise_exception(): [ 608.226619] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 608.226619] env[61852]: self.force_reraise() [ 608.226619] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 608.226619] env[61852]: raise self.value [ 608.226619] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 608.226619] env[61852]: updated_port = self._update_port( [ 608.226619] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 608.226619] env[61852]: _ensure_no_port_binding_failure(port) [ 608.226619] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 608.226619] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 608.227249] env[61852]: nova.exception.PortBindingFailed: Binding failed for port 70b22784-bf3f-4e3e-a446-065ef48c6697, please check neutron logs for more information. [ 608.227249] env[61852]: Removing descriptor: 19 [ 608.227249] env[61852]: ERROR nova.compute.manager [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 70b22784-bf3f-4e3e-a446-065ef48c6697, please check neutron logs for more information. 
[ 608.227249] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Traceback (most recent call last): [ 608.227249] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 608.227249] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] yield resources [ 608.227249] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 608.227249] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] self.driver.spawn(context, instance, image_meta, [ 608.227249] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 608.227249] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 608.227249] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 608.227249] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] vm_ref = self.build_virtual_machine(instance, [ 608.227517] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 608.227517] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] vif_infos = vmwarevif.get_vif_info(self._session, [ 608.227517] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 608.227517] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] for vif in network_info: [ 608.227517] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 608.227517] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] return self._sync_wrapper(fn, *args, **kwargs) [ 608.227517] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 608.227517] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] self.wait() [ 608.227517] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 608.227517] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] self[:] = self._gt.wait() [ 608.227517] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 608.227517] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] return self._exit_event.wait() [ 608.227517] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 608.227806] env[61852]: ERROR 
nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] result = hub.switch() [ 608.227806] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 608.227806] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] return self.greenlet.switch() [ 608.227806] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 608.227806] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] result = function(*args, **kwargs) [ 608.227806] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 608.227806] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] return func(*args, **kwargs) [ 608.227806] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 608.227806] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] raise e [ 608.227806] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 608.227806] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] nwinfo = self.network_api.allocate_for_instance( [ 608.227806] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 608.227806] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] created_port_ids = self._update_ports_for_instance( [ 608.228140] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 608.228140] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] with excutils.save_and_reraise_exception(): [ 608.228140] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 608.228140] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] self.force_reraise() [ 608.228140] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 608.228140] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] raise self.value [ 608.228140] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 608.228140] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] updated_port = self._update_port( [ 608.228140] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 608.228140] 
env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] _ensure_no_port_binding_failure(port) [ 608.228140] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 608.228140] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] raise exception.PortBindingFailed(port_id=port['id']) [ 608.228465] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] nova.exception.PortBindingFailed: Binding failed for port 70b22784-bf3f-4e3e-a446-065ef48c6697, please check neutron logs for more information. [ 608.228465] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] [ 608.228465] env[61852]: INFO nova.compute.manager [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Terminating instance [ 608.229239] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Acquiring lock "refresh_cache-8c872e97-44ca-48c9-b7bb-02dca695ad8a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 608.229509] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Acquired lock "refresh_cache-8c872e97-44ca-48c9-b7bb-02dca695ad8a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.229769] env[61852]: DEBUG nova.network.neutron [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 608.451029] env[61852]: DEBUG nova.network.neutron [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 608.534862] env[61852]: DEBUG nova.network.neutron [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.757134] env[61852]: DEBUG nova.network.neutron [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 608.826313] env[61852]: DEBUG nova.network.neutron [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.828155] env[61852]: DEBUG oslo_concurrency.lockutils [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Acquiring lock "d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 608.828379] env[61852]: DEBUG oslo_concurrency.lockutils [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Lock "d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 608.841186] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9578db9-f8d5-46c5-b8df-d0634c2e58f8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.855525] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9bff917-b221-49b0-87cb-539539d1cf3b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.889595] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb84eaca-6a11-4c77-9c7c-0701a2b67957 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.899205] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48497486-30a7-4c42-99f5-b5ea8cc0c34d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.913778] env[61852]: DEBUG nova.compute.provider_tree [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 608.937429] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "a77ddc8b-f3b2-4e13-944d-5cafecf59fae" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 608.937914] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock
"a77ddc8b-f3b2-4e13-944d-5cafecf59fae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 609.037093] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Releasing lock "refresh_cache-e9a7c08d-e021-43d0-b757-6ad0174b4648" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 609.037341] env[61852]: DEBUG nova.compute.manager [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 609.037517] env[61852]: DEBUG nova.compute.manager [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 609.037683] env[61852]: DEBUG nova.network.neutron [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 609.055912] env[61852]: DEBUG nova.network.neutron [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 609.114738] env[61852]: DEBUG nova.compute.manager [req-ad6524c7-8d31-4b40-9af6-3537876e0243 req-064917d0-1b9a-4d7d-b47a-c31ce6e367a8 service nova] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Received event network-changed-70b22784-bf3f-4e3e-a446-065ef48c6697 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 609.114900] env[61852]: DEBUG nova.compute.manager [req-ad6524c7-8d31-4b40-9af6-3537876e0243 req-064917d0-1b9a-4d7d-b47a-c31ce6e367a8 service nova] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Refreshing instance network info cache due to event network-changed-70b22784-bf3f-4e3e-a446-065ef48c6697.
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 609.115132] env[61852]: DEBUG oslo_concurrency.lockutils [req-ad6524c7-8d31-4b40-9af6-3537876e0243 req-064917d0-1b9a-4d7d-b47a-c31ce6e367a8 service nova] Acquiring lock "refresh_cache-8c872e97-44ca-48c9-b7bb-02dca695ad8a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 609.330334] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Releasing lock "refresh_cache-8c872e97-44ca-48c9-b7bb-02dca695ad8a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 609.330795] env[61852]: DEBUG nova.compute.manager [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 609.331049] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 609.331376] env[61852]: DEBUG oslo_concurrency.lockutils [req-ad6524c7-8d31-4b40-9af6-3537876e0243 req-064917d0-1b9a-4d7d-b47a-c31ce6e367a8 service nova] Acquired lock "refresh_cache-8c872e97-44ca-48c9-b7bb-02dca695ad8a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.331549] env[61852]: DEBUG nova.network.neutron [req-ad6524c7-8d31-4b40-9af6-3537876e0243 req-064917d0-1b9a-4d7d-b47a-c31ce6e367a8 service nova] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Refreshing network info cache for port 70b22784-bf3f-4e3e-a446-065ef48c6697 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 609.332699] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-89853b61-668a-48a3-af3d-cd211d5fab1e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.342035] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac9f46db-10c8-44f2-82de-4a71edbfee96 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.362844] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8c872e97-44ca-48c9-b7bb-02dca695ad8a could not be found. 
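The service-nova records above show the external-event handler serializing on the instance's per-instance "refresh_cache-8c872e97-..." lock before rebuilding the network info cache, the same lock the compute manager holds during teardown. A rough sketch of that per-instance serialization using plain threading primitives; the helper name, the cache dict, and fetch_nw_info are illustrative, not Nova's actual structures:

import threading
from collections import defaultdict

_cache_locks = defaultdict(threading.Lock)  # one lock per instance, in the
_nw_info_cache = {}                         # role of "refresh_cache-<uuid>"

def refresh_instance_cache(instance_uuid, fetch_nw_info):
    # Serialize refreshes per instance, mirroring the Acquiring/Acquired/
    # Releasing lock lines in the log above.
    with _cache_locks['refresh_cache-%s' % instance_uuid]:
        # Once the port is deleted, Neutron returns no ports and the cache
        # is updated with an empty network_info (logged above as []).
        _nw_info_cache[instance_uuid] = fetch_nw_info(instance_uuid) or []
        return _nw_info_cache[instance_uuid]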
[ 609.363051] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 609.363282] env[61852]: INFO nova.compute.manager [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Took 0.03 seconds to destroy the instance on the hypervisor. [ 609.363495] env[61852]: DEBUG oslo.service.loopingcall [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 609.363686] env[61852]: DEBUG nova.compute.manager [-] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 609.363805] env[61852]: DEBUG nova.network.neutron [-] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 609.381227] env[61852]: DEBUG nova.network.neutron [-] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 609.417030] env[61852]: DEBUG nova.scheduler.client.report [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 609.558286] env[61852]: DEBUG nova.network.neutron [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.852989] env[61852]: DEBUG nova.network.neutron [req-ad6524c7-8d31-4b40-9af6-3537876e0243 req-064917d0-1b9a-4d7d-b47a-c31ce6e367a8 service nova] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Instance cache missing network info.
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 609.884815] env[61852]: DEBUG nova.network.neutron [-] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.905142] env[61852]: DEBUG nova.network.neutron [req-ad6524c7-8d31-4b40-9af6-3537876e0243 req-064917d0-1b9a-4d7d-b47a-c31ce6e367a8 service nova] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.921695] env[61852]: DEBUG oslo_concurrency.lockutils [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.004s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 609.922367] env[61852]: ERROR nova.compute.manager [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7df0f793-7d24-48e0-ba34-a70be0d427af, please check neutron logs for more information. [ 609.922367] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Traceback (most recent call last): [ 609.922367] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 609.922367] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] self.driver.spawn(context, instance, image_meta, [ 609.922367] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 609.922367] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] self._vmops.spawn(context, instance, image_meta, injected_files, [ 609.922367] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 609.922367] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] vm_ref = self.build_virtual_machine(instance, [ 609.922367] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 609.922367] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] vif_infos = vmwarevif.get_vif_info(self._session, [ 609.922367] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 609.922630] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] for vif in network_info: [ 609.922630] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 609.922630] env[61852]: ERROR 
nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] return self._sync_wrapper(fn, *args, **kwargs) [ 609.922630] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 609.922630] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] self.wait() [ 609.922630] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 609.922630] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] self[:] = self._gt.wait() [ 609.922630] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 609.922630] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] return self._exit_event.wait() [ 609.922630] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 609.922630] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] current.throw(*self._exc) [ 609.922630] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 609.922630] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] result = function(*args, **kwargs) [ 609.922897] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 609.922897] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] return func(*args, **kwargs) [ 609.922897] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 609.922897] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] raise e [ 609.922897] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 609.922897] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] nwinfo = self.network_api.allocate_for_instance( [ 609.922897] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 609.922897] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] created_port_ids = self._update_ports_for_instance( [ 609.922897] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 609.922897] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] with excutils.save_and_reraise_exception(): [ 609.922897] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 609.922897] env[61852]: ERROR 
nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] self.force_reraise() [ 609.922897] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 609.923469] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] raise self.value [ 609.923469] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 609.923469] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] updated_port = self._update_port( [ 609.923469] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 609.923469] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] _ensure_no_port_binding_failure(port) [ 609.923469] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 609.923469] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] raise exception.PortBindingFailed(port_id=port['id']) [ 609.923469] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] nova.exception.PortBindingFailed: Binding failed for port 7df0f793-7d24-48e0-ba34-a70be0d427af, please check neutron logs for more information. [ 609.923469] env[61852]: ERROR nova.compute.manager [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] [ 609.923469] env[61852]: DEBUG nova.compute.utils [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Binding failed for port 7df0f793-7d24-48e0-ba34-a70be0d427af, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 609.924490] env[61852]: DEBUG nova.compute.manager [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Build of instance b5f994d9-e0aa-4335-8339-df76a1a032ed was re-scheduled: Binding failed for port 7df0f793-7d24-48e0-ba34-a70be0d427af, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 609.924887] env[61852]: DEBUG nova.compute.manager [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 609.925126] env[61852]: DEBUG oslo_concurrency.lockutils [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Acquiring lock "refresh_cache-b5f994d9-e0aa-4335-8339-df76a1a032ed" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 609.925308] env[61852]: DEBUG oslo_concurrency.lockutils [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Acquired lock "refresh_cache-b5f994d9-e0aa-4335-8339-df76a1a032ed" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.925493] env[61852]: DEBUG nova.network.neutron [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 609.926585] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.852s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.061171] env[61852]: INFO nova.compute.manager [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] [instance: e9a7c08d-e021-43d0-b757-6ad0174b4648] Took 1.02 seconds to deallocate network for instance. [ 610.387902] env[61852]: INFO nova.compute.manager [-] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Took 1.02 seconds to deallocate network for instance. 
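The inventory reported above for provider f818062c-7b17-4bd0-94af-192a674543c3 fixes the schedulable capacity of this node. As a worked illustration (assuming placement's usual capacity formula, capacity = (total - reserved) * allocation_ratio, which is not itself printed in the log):

    # Inventory values copied from the set_inventory_for_provider entries above.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(rc, capacity)
    # VCPU 192, MEMORY_MB 196078, DISK_GB 400

So the 4.0 CPU allocation ratio lets placement pack up to 192 vCPUs onto the 48 physical ones, while memory and disk are not overcommitted; the resource tracker's later "free_vcpus=48 / free_ram=181461MB" view counts physical resources, not this overcommitted capacity.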
[ 610.390146] env[61852]: DEBUG nova.compute.claims [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 610.390328] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.407917] env[61852]: DEBUG oslo_concurrency.lockutils [req-ad6524c7-8d31-4b40-9af6-3537876e0243 req-064917d0-1b9a-4d7d-b47a-c31ce6e367a8 service nova] Releasing lock "refresh_cache-8c872e97-44ca-48c9-b7bb-02dca695ad8a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 610.408168] env[61852]: DEBUG nova.compute.manager [req-ad6524c7-8d31-4b40-9af6-3537876e0243 req-064917d0-1b9a-4d7d-b47a-c31ce6e367a8 service nova] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Received event network-vif-deleted-70b22784-bf3f-4e3e-a446-065ef48c6697 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 610.449561] env[61852]: DEBUG nova.network.neutron [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 610.522427] env[61852]: DEBUG nova.network.neutron [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.770566] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41e0eea0-4baa-4cca-a7d6-16b524d61881 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.778089] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2570ff3-5c9c-4dbe-8810-b0d0102dcfcc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.807246] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0286f3cc-066d-4f87-a3ec-93f99e781e47 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.813949] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eae8d4a-56fa-4cba-8c1d-dd300ab45b24 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.826609] env[61852]: DEBUG nova.compute.provider_tree [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Inventory has not changed in ProviderTree for provider: 
f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 611.024714] env[61852]: DEBUG oslo_concurrency.lockutils [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Releasing lock "refresh_cache-b5f994d9-e0aa-4335-8339-df76a1a032ed" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 611.024974] env[61852]: DEBUG nova.compute.manager [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 611.025184] env[61852]: DEBUG nova.compute.manager [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 611.025352] env[61852]: DEBUG nova.network.neutron [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 611.041917] env[61852]: DEBUG nova.network.neutron [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Instance cache missing network info.
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 611.089045] env[61852]: INFO nova.scheduler.client.report [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Deleted allocations for instance e9a7c08d-e021-43d0-b757-6ad0174b4648 [ 611.330049] env[61852]: DEBUG nova.scheduler.client.report [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 611.544627] env[61852]: DEBUG nova.network.neutron [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.597055] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0fd27799-9262-44f7-8e79-7a92e6ba6cdb tempest-ServersWithSpecificFlavorTestJSON-681237491 tempest-ServersWithSpecificFlavorTestJSON-681237491-project-member] Lock "e9a7c08d-e021-43d0-b757-6ad0174b4648" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 76.092s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 611.835113] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.908s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 611.835750] env[61852]: ERROR nova.compute.manager [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d2838531-972f-47ea-83ea-e4364d4030ee, please check neutron logs for more information.
[ 611.835750] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Traceback (most recent call last): [ 611.835750] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 611.835750] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] self.driver.spawn(context, instance, image_meta, [ 611.835750] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 611.835750] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 611.835750] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 611.835750] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] vm_ref = self.build_virtual_machine(instance, [ 611.835750] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 611.835750] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] vif_infos = vmwarevif.get_vif_info(self._session, [ 611.835750] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 611.836181] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] for vif in network_info: [ 611.836181] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 611.836181] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] return self._sync_wrapper(fn, *args, **kwargs) [ 611.836181] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 611.836181] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] self.wait() [ 611.836181] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 611.836181] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] self[:] = self._gt.wait() [ 611.836181] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 611.836181] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] return self._exit_event.wait() [ 611.836181] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 611.836181] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] current.throw(*self._exc) [ 611.836181] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
611.836181] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] result = function(*args, **kwargs) [ 611.836505] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 611.836505] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] return func(*args, **kwargs) [ 611.836505] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 611.836505] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] raise e [ 611.836505] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 611.836505] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] nwinfo = self.network_api.allocate_for_instance( [ 611.836505] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 611.836505] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] created_port_ids = self._update_ports_for_instance( [ 611.836505] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 611.836505] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] with excutils.save_and_reraise_exception(): [ 611.836505] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 611.836505] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] self.force_reraise() [ 611.836505] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 611.836832] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] raise self.value [ 611.836832] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 611.836832] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] updated_port = self._update_port( [ 611.836832] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 611.836832] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] _ensure_no_port_binding_failure(port) [ 611.836832] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 611.836832] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] raise exception.PortBindingFailed(port_id=port['id']) [ 611.836832] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] nova.exception.PortBindingFailed: Binding failed for 
port d2838531-972f-47ea-83ea-e4364d4030ee, please check neutron logs for more information. [ 611.836832] env[61852]: ERROR nova.compute.manager [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] [ 611.836832] env[61852]: DEBUG nova.compute.utils [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Binding failed for port d2838531-972f-47ea-83ea-e4364d4030ee, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 611.837865] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 18.024s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 611.837950] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 611.838047] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61852) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 611.838336] env[61852]: DEBUG oslo_concurrency.lockutils [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.920s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 611.839787] env[61852]: INFO nova.compute.claims [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 611.842417] env[61852]: DEBUG nova.compute.manager [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Build of instance db41ed39-0fef-48ea-9197-8d3d8844547a was re-scheduled: Binding failed for port d2838531-972f-47ea-83ea-e4364d4030ee, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 611.842853] env[61852]: DEBUG nova.compute.manager [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 611.843072] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Acquiring lock "refresh_cache-db41ed39-0fef-48ea-9197-8d3d8844547a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 611.843234] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Acquired lock "refresh_cache-db41ed39-0fef-48ea-9197-8d3d8844547a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.843426] env[61852]: DEBUG nova.network.neutron [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 611.845028] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bab48b11-0e5d-460f-b0e5-0f1d7d44f64c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.853673] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5dc1322-69d6-4055-bdde-6ebf57cd5bf4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.868365] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93c23702-ca50-4992-ae39-9a0bb65dd43f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.876164] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76e581b5-7db2-4958-9445-098bdeabff02 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.905785] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181461MB free_disk=139GB free_vcpus=48 pci_devices=None {{(pid=61852) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 611.905955] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 612.047294] env[61852]: INFO nova.compute.manager [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 
tempest-InstanceActionsV221TestJSON-466633534-project-member] [instance: b5f994d9-e0aa-4335-8339-df76a1a032ed] Took 1.02 seconds to deallocate network for instance. [ 612.100472] env[61852]: DEBUG nova.compute.manager [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 612.376201] env[61852]: DEBUG nova.network.neutron [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 612.500895] env[61852]: DEBUG nova.network.neutron [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 612.634932] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 613.005175] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Releasing lock "refresh_cache-db41ed39-0fef-48ea-9197-8d3d8844547a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 613.005411] env[61852]: DEBUG nova.compute.manager [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 613.005584] env[61852]: DEBUG nova.compute.manager [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 613.005756] env[61852]: DEBUG nova.network.neutron [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 613.024974] env[61852]: DEBUG nova.network.neutron [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Instance cache missing network info.
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 613.079215] env[61852]: INFO nova.scheduler.client.report [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Deleted allocations for instance b5f994d9-e0aa-4335-8339-df76a1a032ed [ 613.210667] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd48d227-a1f3-4aa5-86be-3cee9a31273f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.218305] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fa01341-58a4-4732-bde6-295280aa70fe {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.248333] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-263aa357-9dd8-4f9c-a0d9-899b098655f7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.255632] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0797be8f-2273-407c-81ed-062904e6d9a8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.268502] env[61852]: DEBUG nova.compute.provider_tree [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 613.538866] env[61852]: DEBUG nova.network.neutron [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.589477] env[61852]: DEBUG oslo_concurrency.lockutils [None req-46d40402-cdfd-493b-b1ae-6e688aad56e5 tempest-InstanceActionsV221TestJSON-466633534 tempest-InstanceActionsV221TestJSON-466633534-project-member] Lock "b5f994d9-e0aa-4335-8339-df76a1a032ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 78.050s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 613.771191] env[61852]: DEBUG nova.scheduler.client.report [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 614.038484] env[61852]: INFO
nova.compute.manager [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] [instance: db41ed39-0fef-48ea-9197-8d3d8844547a] Took 1.03 seconds to deallocate network for instance. [ 614.093713] env[61852]: DEBUG nova.compute.manager [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 614.282353] env[61852]: DEBUG oslo_concurrency.lockutils [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.444s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 614.282868] env[61852]: DEBUG nova.compute.manager [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 614.288353] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.456s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 614.289739] env[61852]: INFO nova.compute.claims [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 614.627605] env[61852]: DEBUG oslo_concurrency.lockutils [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 614.791243] env[61852]: DEBUG nova.compute.utils [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 614.796091] env[61852]: DEBUG nova.compute.manager [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 614.796091] env[61852]: DEBUG nova.network.neutron [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 614.878622] env[61852]: DEBUG nova.policy [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3bcaec95d9224d4ab09272b9cc319d0d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3cd41fbf39314699ac70a890101dcb82', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 615.072195] env[61852]: INFO nova.scheduler.client.report [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Deleted allocations for instance db41ed39-0fef-48ea-9197-8d3d8844547a [ 615.303860] env[61852]: DEBUG nova.compute.manager [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 615.394177] env[61852]: DEBUG nova.network.neutron [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Successfully created port: 015caeea-bda3-436c-b559-371e857627aa {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 615.589427] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1b534a5c-829f-4a76-9968-31a839c3fd5a tempest-ServerPasswordTestJSON-722867372 tempest-ServerPasswordTestJSON-722867372-project-member] Lock "db41ed39-0fef-48ea-9197-8d3d8844547a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 77.799s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 615.741574] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb1d4a5e-aaf6-4972-8413-2fdea8cba748 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.754022] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbbb0174-ac3b-43b5-a602-07f9ba8724bf {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.786178] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4986505d-b1f4-49a2-a858-53c64e8a5f95 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.792900] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ffc6536-5078-4761-8912-c236f96fa3fa {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.806130] env[61852]: DEBUG nova.compute.provider_tree [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 616.093141] env[61852]: DEBUG nova.compute.manager [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 616.294040] env[61852]: DEBUG nova.compute.manager [req-8c6a02ed-9c9d-4933-90de-7e6112f96954 req-7aa949f8-2e9b-40a1-a2f0-ffa0a9632134 service nova] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Received event network-changed-015caeea-bda3-436c-b559-371e857627aa {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 616.294040] env[61852]: DEBUG nova.compute.manager [req-8c6a02ed-9c9d-4933-90de-7e6112f96954 req-7aa949f8-2e9b-40a1-a2f0-ffa0a9632134 service nova] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Refreshing instance network info cache due to event network-changed-015caeea-bda3-436c-b559-371e857627aa.
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 616.294040] env[61852]: DEBUG oslo_concurrency.lockutils [req-8c6a02ed-9c9d-4933-90de-7e6112f96954 req-7aa949f8-2e9b-40a1-a2f0-ffa0a9632134 service nova] Acquiring lock "refresh_cache-5d89c8de-69f9-432d-bb64-46d662097463" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 616.294040] env[61852]: DEBUG oslo_concurrency.lockutils [req-8c6a02ed-9c9d-4933-90de-7e6112f96954 req-7aa949f8-2e9b-40a1-a2f0-ffa0a9632134 service nova] Acquired lock "refresh_cache-5d89c8de-69f9-432d-bb64-46d662097463" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 616.294040] env[61852]: DEBUG nova.network.neutron [req-8c6a02ed-9c9d-4933-90de-7e6112f96954 req-7aa949f8-2e9b-40a1-a2f0-ffa0a9632134 service nova] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Refreshing network info cache for port 015caeea-bda3-436c-b559-371e857627aa {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 616.308877] env[61852]: DEBUG nova.scheduler.client.report [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 616.323077] env[61852]: DEBUG nova.compute.manager [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 616.359620] env[61852]: DEBUG nova.virt.hardware [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 616.359852] env[61852]: DEBUG nova.virt.hardware [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 616.360028] env[61852]: DEBUG nova.virt.hardware [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 616.360284] env[61852]: DEBUG nova.virt.hardware [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 616.360442] env[61852]: DEBUG nova.virt.hardware [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 616.360593] env[61852]: DEBUG nova.virt.hardware [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 616.360792] env[61852]: DEBUG nova.virt.hardware [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 616.362179] env[61852]: DEBUG nova.virt.hardware [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 616.362389] env[61852]: DEBUG nova.virt.hardware [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 616.362565] env[61852]: DEBUG nova.virt.hardware [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 616.362743] env[61852]: DEBUG nova.virt.hardware [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 616.364109] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15d1cbfa-a134-475d-8b08-7a22b156c4c9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.373114] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b78c4edd-3cb7-4f54-bf7d-5e8ac92562dc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.547258] env[61852]: ERROR nova.compute.manager [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 015caeea-bda3-436c-b559-371e857627aa, please check neutron logs for more information. 
[ 616.547258] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 616.547258] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 616.547258] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 616.547258] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 616.547258] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 616.547258] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 616.547258] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 616.547258] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 616.547258] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 616.547258] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 616.547258] env[61852]: ERROR nova.compute.manager raise self.value [ 616.547258] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 616.547258] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 616.547258] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 616.547258] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 616.547705] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 616.547705] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 616.547705] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 015caeea-bda3-436c-b559-371e857627aa, please check neutron logs for more information. 
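The traceback above bottoms out in _ensure_no_port_binding_failure() at nova/network/neutron.py:294. As a hedged sketch of what that check amounts to (the 'binding:vif_type' field and the 'binding_failed' value are inferred from general Nova/Neutron behaviour, not read from this log): Neutron records the outcome of port binding on the port itself, and Nova converts a failed binding into the PortBindingFailed exception that aborts the spawn.

    # Sketch only, not the verbatim Nova source. Neutron reports the
    # result of binding in the port's 'binding:vif_type' field; a value
    # of 'binding_failed' means Neutron could not bind the port to the
    # host, so Nova raises instead of building a VM with a dead NIC.
    from nova import exception

    def _ensure_no_port_binding_failure(port):
        if port.get('binding:vif_type') == 'binding_failed':
            raise exception.PortBindingFailed(port_id=port['id'])

The compute side is healthy here; the failure originates in Neutron ("please check neutron logs"), and everything that follows for instance 5d89c8de-69f9-432d-bb64-46d662097463 is cleanup of that failed build.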
[ 616.547705] env[61852]: ERROR nova.compute.manager [ 616.547705] env[61852]: Traceback (most recent call last): [ 616.547705] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 616.547705] env[61852]: listener.cb(fileno) [ 616.547705] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 616.547705] env[61852]: result = function(*args, **kwargs) [ 616.547705] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 616.547705] env[61852]: return func(*args, **kwargs) [ 616.547705] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 616.547705] env[61852]: raise e [ 616.547705] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 616.547705] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 616.547705] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 616.547705] env[61852]: created_port_ids = self._update_ports_for_instance( [ 616.547705] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 616.547705] env[61852]: with excutils.save_and_reraise_exception(): [ 616.547705] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 616.547705] env[61852]: self.force_reraise() [ 616.547705] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 616.547705] env[61852]: raise self.value [ 616.547705] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 616.547705] env[61852]: updated_port = self._update_port( [ 616.547705] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 616.547705] env[61852]: _ensure_no_port_binding_failure(port) [ 616.547705] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 616.547705] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 616.548370] env[61852]: nova.exception.PortBindingFailed: Binding failed for port 015caeea-bda3-436c-b559-371e857627aa, please check neutron logs for more information. [ 616.548370] env[61852]: Removing descriptor: 19 [ 616.548370] env[61852]: ERROR nova.compute.manager [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 015caeea-bda3-436c-b559-371e857627aa, please check neutron logs for more information. 
[ 616.548370] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Traceback (most recent call last): [ 616.548370] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 616.548370] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] yield resources [ 616.548370] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 616.548370] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] self.driver.spawn(context, instance, image_meta, [ 616.548370] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 616.548370] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] self._vmops.spawn(context, instance, image_meta, injected_files, [ 616.548370] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 616.548370] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] vm_ref = self.build_virtual_machine(instance, [ 616.548733] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 616.548733] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] vif_infos = vmwarevif.get_vif_info(self._session, [ 616.548733] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 616.548733] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] for vif in network_info: [ 616.548733] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 616.548733] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] return self._sync_wrapper(fn, *args, **kwargs) [ 616.548733] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 616.548733] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] self.wait() [ 616.548733] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 616.548733] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] self[:] = self._gt.wait() [ 616.548733] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 616.548733] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] return self._exit_event.wait() [ 616.548733] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 616.549078] env[61852]: ERROR 
nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] result = hub.switch() [ 616.549078] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 616.549078] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] return self.greenlet.switch() [ 616.549078] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 616.549078] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] result = function(*args, **kwargs) [ 616.549078] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 616.549078] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] return func(*args, **kwargs) [ 616.549078] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 616.549078] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] raise e [ 616.549078] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 616.549078] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] nwinfo = self.network_api.allocate_for_instance( [ 616.549078] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 616.549078] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] created_port_ids = self._update_ports_for_instance( [ 616.549412] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 616.549412] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] with excutils.save_and_reraise_exception(): [ 616.549412] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 616.549412] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] self.force_reraise() [ 616.549412] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 616.549412] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] raise self.value [ 616.549412] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 616.549412] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] updated_port = self._update_port( [ 616.549412] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 616.549412] 
env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] _ensure_no_port_binding_failure(port) [ 616.549412] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 616.549412] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] raise exception.PortBindingFailed(port_id=port['id']) [ 616.549720] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] nova.exception.PortBindingFailed: Binding failed for port 015caeea-bda3-436c-b559-371e857627aa, please check neutron logs for more information. [ 616.549720] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] [ 616.549720] env[61852]: INFO nova.compute.manager [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Terminating instance [ 616.551031] env[61852]: DEBUG oslo_concurrency.lockutils [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Acquiring lock "refresh_cache-5d89c8de-69f9-432d-bb64-46d662097463" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 616.627798] env[61852]: DEBUG oslo_concurrency.lockutils [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 616.817299] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.527s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 616.817299] env[61852]: DEBUG nova.compute.manager [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 616.819048] env[61852]: DEBUG oslo_concurrency.lockutils [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.394s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 616.819454] env[61852]: DEBUG nova.objects.instance [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Lazy-loading 'resources' on Instance uuid 4ce41dca-63c6-447d-9c0a-00f9966e0093 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 616.821718] env[61852]: DEBUG nova.network.neutron [req-8c6a02ed-9c9d-4933-90de-7e6112f96954 req-7aa949f8-2e9b-40a1-a2f0-ffa0a9632134 service nova] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 616.977157] env[61852]: DEBUG nova.network.neutron [req-8c6a02ed-9c9d-4933-90de-7e6112f96954 req-7aa949f8-2e9b-40a1-a2f0-ffa0a9632134 service nova] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.323928] env[61852]: DEBUG nova.compute.utils [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 617.325619] env[61852]: DEBUG nova.compute.manager [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 617.325619] env[61852]: DEBUG nova.network.neutron [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 617.402235] env[61852]: DEBUG nova.policy [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d59efb35818494e85d8dff0e89ad104', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '897e604208c542dd936ee86be2b25d06', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 617.484158] env[61852]: DEBUG oslo_concurrency.lockutils [req-8c6a02ed-9c9d-4933-90de-7e6112f96954 req-7aa949f8-2e9b-40a1-a2f0-ffa0a9632134 service nova] Releasing lock "refresh_cache-5d89c8de-69f9-432d-bb64-46d662097463" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 617.484158] env[61852]: DEBUG oslo_concurrency.lockutils [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Acquired lock "refresh_cache-5d89c8de-69f9-432d-bb64-46d662097463" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.484158] env[61852]: DEBUG nova.network.neutron [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 617.733115] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-231e3594-b423-4042-ad44-965fae5f7eb7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.741958] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce7ce389-72e7-47f7-8404-eeb6300fc321 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.776623] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09baedd7-6b29-4675-9b18-3fef4fba9235 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.784704] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e56539d2-b728-4b5b-ac4b-7e89078a4c1d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.803459] env[61852]: DEBUG nova.compute.provider_tree [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 
tempest-ServerDiagnosticsV248Test-1422852949-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 617.836026] env[61852]: DEBUG nova.compute.manager [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 617.861199] env[61852]: DEBUG nova.network.neutron [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Successfully created port: 440fe0cd-e03c-4dec-937f-cef09e489747 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 618.015116] env[61852]: DEBUG nova.network.neutron [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 618.250049] env[61852]: DEBUG nova.network.neutron [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.308098] env[61852]: DEBUG nova.scheduler.client.report [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 618.664189] env[61852]: DEBUG nova.compute.manager [req-50272cc7-d050-43b2-8c32-171ad4a36dd9 req-1fd1bd47-10bc-4890-8236-259323d47938 service nova] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Received event network-vif-deleted-015caeea-bda3-436c-b559-371e857627aa {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 618.754645] env[61852]: DEBUG oslo_concurrency.lockutils [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Releasing lock "refresh_cache-5d89c8de-69f9-432d-bb64-46d662097463" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 618.755106] env[61852]: DEBUG nova.compute.manager [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] [instance: 
5d89c8de-69f9-432d-bb64-46d662097463] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 618.755301] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 618.755608] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cdf9ca09-0aa7-4d74-8156-961275420d53 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.767074] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eddd872b-c444-405c-8fc6-28ff54d6e32c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.791950] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5d89c8de-69f9-432d-bb64-46d662097463 could not be found. [ 618.791950] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 618.792227] env[61852]: INFO nova.compute.manager [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Took 0.04 seconds to destroy the instance on the hypervisor. [ 618.792407] env[61852]: DEBUG oslo.service.loopingcall [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 618.792916] env[61852]: DEBUG nova.compute.manager [-] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 618.793010] env[61852]: DEBUG nova.network.neutron [-] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 618.812759] env[61852]: DEBUG oslo_concurrency.lockutils [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.993s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 618.815705] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.218s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 618.840251] env[61852]: DEBUG nova.network.neutron [-] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 618.846951] env[61852]: INFO nova.scheduler.client.report [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Deleted allocations for instance 4ce41dca-63c6-447d-9c0a-00f9966e0093 [ 618.848741] env[61852]: DEBUG nova.compute.manager [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 618.888668] env[61852]: DEBUG nova.virt.hardware [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 618.888668] env[61852]: DEBUG nova.virt.hardware [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 618.888825] env[61852]: DEBUG nova.virt.hardware [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 618.889050] env[61852]: DEBUG nova.virt.hardware [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 618.890778] env[61852]: DEBUG nova.virt.hardware [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 618.890778] env[61852]: DEBUG nova.virt.hardware [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 618.890778] env[61852]: DEBUG nova.virt.hardware [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 618.890778] env[61852]: DEBUG nova.virt.hardware [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 618.890778] env[61852]: DEBUG 
nova.virt.hardware [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 618.890992] env[61852]: DEBUG nova.virt.hardware [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 618.890992] env[61852]: DEBUG nova.virt.hardware [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 618.891105] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d2d21a3-2cc1-4b7d-b132-abe77be0f8de {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.907839] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5185cba-9cdb-470b-b5c2-e32bcb1e2266 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.120679] env[61852]: ERROR nova.compute.manager [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 440fe0cd-e03c-4dec-937f-cef09e489747, please check neutron logs for more information. 
[ 619.120679] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 619.120679] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 619.120679] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 619.120679] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 619.120679] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 619.120679] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 619.120679] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 619.120679] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 619.120679] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 619.120679] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 619.120679] env[61852]: ERROR nova.compute.manager raise self.value [ 619.120679] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 619.120679] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 619.120679] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 619.120679] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 619.121141] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 619.121141] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 619.121141] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 440fe0cd-e03c-4dec-937f-cef09e489747, please check neutron logs for more information. 
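Before this second failure, the records at 618.888-618.891 above walk nova.virt.hardware through CPU-topology selection for the 1-vCPU m1.nano flavor. A minimal sketch of that enumeration, assuming it reduces to "all (sockets, cores, threads) factorisations of the vCPU count within the per-dimension maxima" (the real _get_possible_cpu_topologies also honours NUMA and threading constraints): with no flavor or image limits the maxima default to 65536, and the only triple whose product is 1 is (1, 1, 1), hence the single VirtCPUTopology(cores=1,sockets=1,threads=1) result.

    # Minimal sketch under the stated assumption, not Nova's actual code.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        yield (sockets, cores, threads)

    print(list(possible_topologies(1)))  # [(1, 1, 1)] -- matches "Got 1 possible topologies"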
[ 619.121141] env[61852]: ERROR nova.compute.manager [ 619.121141] env[61852]: Traceback (most recent call last): [ 619.121141] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 619.121141] env[61852]: listener.cb(fileno) [ 619.121141] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 619.121141] env[61852]: result = function(*args, **kwargs) [ 619.121141] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 619.121141] env[61852]: return func(*args, **kwargs) [ 619.121141] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 619.121141] env[61852]: raise e [ 619.121141] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 619.121141] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 619.121141] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 619.121141] env[61852]: created_port_ids = self._update_ports_for_instance( [ 619.121141] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 619.121141] env[61852]: with excutils.save_and_reraise_exception(): [ 619.121141] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 619.121141] env[61852]: self.force_reraise() [ 619.121141] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 619.121141] env[61852]: raise self.value [ 619.121141] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 619.121141] env[61852]: updated_port = self._update_port( [ 619.121141] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 619.121141] env[61852]: _ensure_no_port_binding_failure(port) [ 619.121141] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 619.121141] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 619.121746] env[61852]: nova.exception.PortBindingFailed: Binding failed for port 440fe0cd-e03c-4dec-937f-cef09e489747, please check neutron logs for more information. [ 619.121746] env[61852]: Removing descriptor: 19 [ 619.121746] env[61852]: ERROR nova.compute.manager [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 440fe0cd-e03c-4dec-937f-cef09e489747, please check neutron logs for more information. 
[ 619.121746] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Traceback (most recent call last): [ 619.121746] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 619.121746] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] yield resources [ 619.121746] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 619.121746] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] self.driver.spawn(context, instance, image_meta, [ 619.121746] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 619.121746] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 619.121746] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 619.121746] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] vm_ref = self.build_virtual_machine(instance, [ 619.122034] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 619.122034] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] vif_infos = vmwarevif.get_vif_info(self._session, [ 619.122034] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 619.122034] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] for vif in network_info: [ 619.122034] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 619.122034] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] return self._sync_wrapper(fn, *args, **kwargs) [ 619.122034] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 619.122034] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] self.wait() [ 619.122034] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 619.122034] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] self[:] = self._gt.wait() [ 619.122034] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 619.122034] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] return self._exit_event.wait() [ 619.122034] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 619.122317] env[61852]: ERROR 
nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] result = hub.switch() [ 619.122317] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 619.122317] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] return self.greenlet.switch() [ 619.122317] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 619.122317] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] result = function(*args, **kwargs) [ 619.122317] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 619.122317] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] return func(*args, **kwargs) [ 619.122317] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 619.122317] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] raise e [ 619.122317] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 619.122317] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] nwinfo = self.network_api.allocate_for_instance( [ 619.122317] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 619.122317] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] created_port_ids = self._update_ports_for_instance( [ 619.122597] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 619.122597] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] with excutils.save_and_reraise_exception(): [ 619.122597] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 619.122597] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] self.force_reraise() [ 619.122597] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 619.122597] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] raise self.value [ 619.122597] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 619.122597] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] updated_port = self._update_port( [ 619.122597] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 619.122597] 
env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] _ensure_no_port_binding_failure(port) [ 619.122597] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 619.122597] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] raise exception.PortBindingFailed(port_id=port['id']) [ 619.122852] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] nova.exception.PortBindingFailed: Binding failed for port 440fe0cd-e03c-4dec-937f-cef09e489747, please check neutron logs for more information. [ 619.122852] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] [ 619.122852] env[61852]: INFO nova.compute.manager [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Terminating instance [ 619.130611] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Acquiring lock "refresh_cache-ab92661d-d5e3-4e7a-b6c3-48d48bf795b3" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 619.130748] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Acquired lock "refresh_cache-ab92661d-d5e3-4e7a-b6c3-48d48bf795b3" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.130912] env[61852]: DEBUG nova.network.neutron [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 619.347314] env[61852]: DEBUG nova.network.neutron [-] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.360857] env[61852]: DEBUG oslo_concurrency.lockutils [None req-7bada15f-bb83-4b78-998b-bf945a801a3f tempest-ServerDiagnosticsV248Test-1422852949 tempest-ServerDiagnosticsV248Test-1422852949-project-member] Lock "4ce41dca-63c6-447d-9c0a-00f9966e0093" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 23.638s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 619.658866] env[61852]: DEBUG nova.network.neutron [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 619.715161] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9299a063-8b98-4405-b5b8-dfb39304f835 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.337179] env[61852]: DEBUG nova.network.neutron [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.338586] env[61852]: INFO nova.compute.manager [-] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Took 1.55 seconds to deallocate network for instance. [ 620.341701] env[61852]: DEBUG nova.compute.claims [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 620.341869] env[61852]: DEBUG oslo_concurrency.lockutils [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 620.345435] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c479012-96a5-4a45-97b9-69fc99d45d0e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.375710] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f185e3b-55f1-4969-904d-4f4e68b92f66 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.382802] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b22c7833-0885-4064-bb36-2f3d0c6c91d9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.396332] env[61852]: DEBUG nova.compute.provider_tree [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 620.766378] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquiring lock "5992f657-c29e-4da5-98f1-286a384ca0cd" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 620.766584] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] 
Lock "5992f657-c29e-4da5-98f1-286a384ca0cd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 620.767449] env[61852]: DEBUG nova.compute.manager [req-23b2e38d-6c8d-41fc-b49c-6e242a1bde59 req-c481086a-f356-4e9c-9ed2-af34a9e7307d service nova] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Received event network-changed-440fe0cd-e03c-4dec-937f-cef09e489747 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 620.768080] env[61852]: DEBUG nova.compute.manager [req-23b2e38d-6c8d-41fc-b49c-6e242a1bde59 req-c481086a-f356-4e9c-9ed2-af34a9e7307d service nova] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Refreshing instance network info cache due to event network-changed-440fe0cd-e03c-4dec-937f-cef09e489747. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 620.768080] env[61852]: DEBUG oslo_concurrency.lockutils [req-23b2e38d-6c8d-41fc-b49c-6e242a1bde59 req-c481086a-f356-4e9c-9ed2-af34a9e7307d service nova] Acquiring lock "refresh_cache-ab92661d-d5e3-4e7a-b6c3-48d48bf795b3" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 620.792105] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquiring lock "d7ca3eac-9738-483a-ae14-67e17929a251" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 620.792364] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lock "d7ca3eac-9738-483a-ae14-67e17929a251" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 620.841890] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Releasing lock "refresh_cache-ab92661d-d5e3-4e7a-b6c3-48d48bf795b3" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 620.841890] env[61852]: DEBUG nova.compute.manager [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 620.841890] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 620.842069] env[61852]: DEBUG oslo_concurrency.lockutils [req-23b2e38d-6c8d-41fc-b49c-6e242a1bde59 req-c481086a-f356-4e9c-9ed2-af34a9e7307d service nova] Acquired lock "refresh_cache-ab92661d-d5e3-4e7a-b6c3-48d48bf795b3" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.842206] env[61852]: DEBUG nova.network.neutron [req-23b2e38d-6c8d-41fc-b49c-6e242a1bde59 req-c481086a-f356-4e9c-9ed2-af34a9e7307d service nova] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Refreshing network info cache for port 440fe0cd-e03c-4dec-937f-cef09e489747 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 620.843352] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5597c5ba-7b56-4042-93a3-799ca924c988 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.853073] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-737237dc-ff06-41a8-8a61-0fc6712e63d8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.882765] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ab92661d-d5e3-4e7a-b6c3-48d48bf795b3 could not be found. [ 620.882950] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 620.883233] env[61852]: INFO nova.compute.manager [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Took 0.04 seconds to destroy the instance on the hypervisor. [ 620.883507] env[61852]: DEBUG oslo.service.loopingcall [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 620.883943] env[61852]: DEBUG nova.compute.manager [-] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 620.884055] env[61852]: DEBUG nova.network.neutron [-] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 620.902265] env[61852]: DEBUG nova.scheduler.client.report [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 620.906994] env[61852]: DEBUG nova.network.neutron [-] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 621.371870] env[61852]: DEBUG nova.network.neutron [req-23b2e38d-6c8d-41fc-b49c-6e242a1bde59 req-c481086a-f356-4e9c-9ed2-af34a9e7307d service nova] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 621.409333] env[61852]: DEBUG nova.network.neutron [-] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.411090] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.596s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 621.411693] env[61852]: ERROR nova.compute.manager [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port db25fced-51cb-4f7f-9cd5-1a48f3cc1b3b, please check neutron logs for more information. 
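Annotation: the PortBindingFailed above is Nova's standard reaction when Neutron returns a port whose binding:vif_type is 'binding_failed'. The traceback that follows ends in exactly this guard; a condensed sketch, paraphrased from the frames the log itself cites (nova/network/neutron.py:294 and the raise on port['id']), not verbatim source:

    # Sketch of the check the traceback below bottoms out in.
    # VIF_TYPE_BINDING_FAILED is the 'binding_failed' constant from
    # nova.network.model; details abridged from the cited frames.
    from nova import exception
    from nova.network import model as network_model

    def _ensure_no_port_binding_failure(port):
        if port.get('binding:vif_type') == network_model.VIF_TYPE_BINDING_FAILED:
            raise exception.PortBindingFailed(port_id=port['id'])

When this fires inside _update_ports_for_instance, the build aborts and, as the later records show, the instance is re-scheduled; the root cause is on the Neutron side (no mechanism driver could bind the port), which is why the message defers to the neutron logs.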
[ 621.411693] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Traceback (most recent call last): [ 621.411693] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 621.411693] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] self.driver.spawn(context, instance, image_meta, [ 621.411693] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 621.411693] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] self._vmops.spawn(context, instance, image_meta, injected_files, [ 621.411693] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 621.411693] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] vm_ref = self.build_virtual_machine(instance, [ 621.411693] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 621.411693] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] vif_infos = vmwarevif.get_vif_info(self._session, [ 621.411693] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 621.412197] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] for vif in network_info: [ 621.412197] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 621.412197] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] return self._sync_wrapper(fn, *args, **kwargs) [ 621.412197] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 621.412197] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] self.wait() [ 621.412197] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 621.412197] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] self[:] = self._gt.wait() [ 621.412197] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 621.412197] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] return self._exit_event.wait() [ 621.412197] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 621.412197] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] current.throw(*self._exc) [ 621.412197] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
621.412197] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] result = function(*args, **kwargs) [ 621.412641] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 621.412641] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] return func(*args, **kwargs) [ 621.412641] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 621.412641] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] raise e [ 621.412641] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 621.412641] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] nwinfo = self.network_api.allocate_for_instance( [ 621.412641] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 621.412641] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] created_port_ids = self._update_ports_for_instance( [ 621.412641] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 621.412641] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] with excutils.save_and_reraise_exception(): [ 621.412641] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 621.412641] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] self.force_reraise() [ 621.412641] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 621.412976] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] raise self.value [ 621.412976] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 621.412976] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] updated_port = self._update_port( [ 621.412976] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 621.412976] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] _ensure_no_port_binding_failure(port) [ 621.412976] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 621.412976] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] raise exception.PortBindingFailed(port_id=port['id']) [ 621.412976] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] nova.exception.PortBindingFailed: Binding failed for 
port db25fced-51cb-4f7f-9cd5-1a48f3cc1b3b, please check neutron logs for more information. [ 621.412976] env[61852]: ERROR nova.compute.manager [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] [ 621.414632] env[61852]: DEBUG nova.compute.utils [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Binding failed for port db25fced-51cb-4f7f-9cd5-1a48f3cc1b3b, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 621.417471] env[61852]: DEBUG nova.compute.manager [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Build of instance 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce was re-scheduled: Binding failed for port db25fced-51cb-4f7f-9cd5-1a48f3cc1b3b, please check neutron logs for more information. {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 621.417908] env[61852]: DEBUG nova.compute.manager [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 621.418148] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Acquiring lock "refresh_cache-2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 621.418298] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Acquired lock "refresh_cache-2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.418453] env[61852]: DEBUG nova.network.neutron [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 621.421377] env[61852]: DEBUG oslo_concurrency.lockutils [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.199s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 621.425044] env[61852]: INFO nova.compute.claims [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 621.591315] env[61852]: DEBUG nova.network.neutron [req-23b2e38d-6c8d-41fc-b49c-6e242a1bde59 req-c481086a-f356-4e9c-9ed2-af34a9e7307d service nova] [instance: 
ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.921659] env[61852]: INFO nova.compute.manager [-] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Took 1.04 seconds to deallocate network for instance. [ 621.925083] env[61852]: DEBUG nova.compute.claims [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 621.925261] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.948330] env[61852]: DEBUG nova.network.neutron [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 622.002783] env[61852]: DEBUG nova.network.neutron [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.096544] env[61852]: DEBUG oslo_concurrency.lockutils [req-23b2e38d-6c8d-41fc-b49c-6e242a1bde59 req-c481086a-f356-4e9c-9ed2-af34a9e7307d service nova] Releasing lock "refresh_cache-ab92661d-d5e3-4e7a-b6c3-48d48bf795b3" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 622.096544] env[61852]: DEBUG nova.compute.manager [req-23b2e38d-6c8d-41fc-b49c-6e242a1bde59 req-c481086a-f356-4e9c-9ed2-af34a9e7307d service nova] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Received event network-vif-deleted-440fe0cd-e03c-4dec-937f-cef09e489747 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 622.507721] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Releasing lock "refresh_cache-2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 622.508333] env[61852]: DEBUG nova.compute.manager [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 622.508604] env[61852]: DEBUG nova.compute.manager [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 622.508881] env[61852]: DEBUG nova.network.neutron [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 622.527689] env[61852]: DEBUG nova.network.neutron [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 622.828709] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd9a6fdf-e45a-4b90-86d0-3be12850366e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.836685] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c7165d7-8ec6-4a6a-9f7e-eaf12c5ca22f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.872509] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a06740c-bf2e-4e29-bb5b-cff9e22080c8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.881096] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06bbb888-1ec0-4c7b-834c-5211d29130a8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.896140] env[61852]: DEBUG nova.compute.provider_tree [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 623.028962] env[61852]: DEBUG nova.network.neutron [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.400621] env[61852]: DEBUG nova.scheduler.client.report [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 623.532227] env[61852]: INFO nova.compute.manager [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] [instance: 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce] Took 1.02 seconds to deallocate network for instance. [ 623.906842] env[61852]: DEBUG oslo_concurrency.lockutils [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.485s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 623.907063] env[61852]: DEBUG nova.compute.manager [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 623.911199] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.590s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.417288] env[61852]: DEBUG nova.compute.utils [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 624.424262] env[61852]: DEBUG nova.compute.manager [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 624.424585] env[61852]: DEBUG nova.network.neutron [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 624.481330] env[61852]: DEBUG nova.policy [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '088de3ba55b844388a072a6397543765', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9348e1bb3c6d4ababdba3cc38b2c4d64', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 624.580054] env[61852]: INFO nova.scheduler.client.report [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Deleted allocations for instance 2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce [ 624.855203] env[61852]: DEBUG nova.network.neutron [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Successfully created port: 78f5a5c3-21b6-4553-b82b-c2b4a5d5d59d {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 624.868421] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d01796d-123d-4ec5-b5da-3f093d492bad {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.876428] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9321ff8c-5297-4a0f-bff1-2237c2d1b2c5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.910451] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4cf8181-6dc8-4288-b1a9-05ebfb007a8f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.918089] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6b0c193-0f6d-4263-be1b-62915022f392 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.924209] env[61852]: DEBUG nova.compute.manager [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 624.934749] env[61852]: DEBUG nova.compute.provider_tree [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 625.094329] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c277b792-698b-4aaf-94ba-3f4c7860f021 tempest-TenantUsagesTestJSON-1928343518 tempest-TenantUsagesTestJSON-1928343518-project-member] Lock "2d4e9acd-2fe7-4fa4-a8d6-135e8a973bce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.677s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 625.445604] env[61852]: DEBUG nova.scheduler.client.report [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 625.599074] env[61852]: DEBUG nova.compute.manager [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 625.663992] env[61852]: DEBUG oslo_concurrency.lockutils [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Acquiring lock "bd549d69-403b-4c5c-9e08-0c84d32a7c0a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.664442] env[61852]: DEBUG oslo_concurrency.lockutils [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Lock "bd549d69-403b-4c5c-9e08-0c84d32a7c0a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.794890] env[61852]: DEBUG nova.compute.manager [req-078752a5-7e3d-46b5-a9fa-572c2192e5e9 req-7c913700-486a-466f-99b9-19cadee01539 service nova] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Received event network-changed-78f5a5c3-21b6-4553-b82b-c2b4a5d5d59d {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 625.794890] env[61852]: DEBUG nova.compute.manager [req-078752a5-7e3d-46b5-a9fa-572c2192e5e9 req-7c913700-486a-466f-99b9-19cadee01539 service nova] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Refreshing instance network info cache due to event network-changed-78f5a5c3-21b6-4553-b82b-c2b4a5d5d59d. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 625.794985] env[61852]: DEBUG oslo_concurrency.lockutils [req-078752a5-7e3d-46b5-a9fa-572c2192e5e9 req-7c913700-486a-466f-99b9-19cadee01539 service nova] Acquiring lock "refresh_cache-23f221fd-8f76-4a6f-8189-49d9be9da7e2" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 625.795185] env[61852]: DEBUG oslo_concurrency.lockutils [req-078752a5-7e3d-46b5-a9fa-572c2192e5e9 req-7c913700-486a-466f-99b9-19cadee01539 service nova] Acquired lock "refresh_cache-23f221fd-8f76-4a6f-8189-49d9be9da7e2" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.795354] env[61852]: DEBUG nova.network.neutron [req-078752a5-7e3d-46b5-a9fa-572c2192e5e9 req-7c913700-486a-466f-99b9-19cadee01539 service nova] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Refreshing network info cache for port 78f5a5c3-21b6-4553-b82b-c2b4a5d5d59d {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 625.916980] env[61852]: ERROR nova.compute.manager [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 78f5a5c3-21b6-4553-b82b-c2b4a5d5d59d, please check neutron logs for more information. 
[ 625.916980] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 625.916980] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 625.916980] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 625.916980] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 625.916980] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 625.916980] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 625.916980] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 625.916980] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 625.916980] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 625.916980] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 625.916980] env[61852]: ERROR nova.compute.manager raise self.value [ 625.916980] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 625.916980] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 625.916980] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 625.916980] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 625.917387] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 625.917387] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 625.917387] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 78f5a5c3-21b6-4553-b82b-c2b4a5d5d59d, please check neutron logs for more information. 
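Annotation: the same PortBindingFailed surfaces twice, once wrapped per-instance by the compute manager and once more (below) as a raw greenthread traceback, because network allocation runs asynchronously in _allocate_network_async and the failure is replayed to the waiting caller via current.throw(). The re-raise inside _update_ports_for_instance uses oslo's save-and-reraise helper; a minimal usage illustration (update_port and do_cleanup are placeholders here, not Nova code):

    from oslo_utils import excutils

    def update_port_guarded(port):
        try:
            update_port(port)  # placeholder for the call that fails
        except Exception:
            # Cleanup runs first; the original exception, traceback
            # intact, is re-raised when the context manager exits.
            with excutils.save_and_reraise_exception():
                do_cleanup(port)

This is why the excutils.py:227 (__exit__) and force_reraise frames appear in every copy of the traceback.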
[ 625.917387] env[61852]: ERROR nova.compute.manager [ 625.917512] env[61852]: Traceback (most recent call last): [ 625.917540] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 625.917540] env[61852]: listener.cb(fileno) [ 625.917540] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 625.917540] env[61852]: result = function(*args, **kwargs) [ 625.917540] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 625.917540] env[61852]: return func(*args, **kwargs) [ 625.917540] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 625.917540] env[61852]: raise e [ 625.917540] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 625.917540] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 625.917754] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 625.917754] env[61852]: created_port_ids = self._update_ports_for_instance( [ 625.917754] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 625.917754] env[61852]: with excutils.save_and_reraise_exception(): [ 625.917754] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 625.917754] env[61852]: self.force_reraise() [ 625.917754] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 625.917754] env[61852]: raise self.value [ 625.917754] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 625.917754] env[61852]: updated_port = self._update_port( [ 625.917754] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 625.917754] env[61852]: _ensure_no_port_binding_failure(port) [ 625.917754] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 625.917754] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 625.917754] env[61852]: nova.exception.PortBindingFailed: Binding failed for port 78f5a5c3-21b6-4553-b82b-c2b4a5d5d59d, please check neutron logs for more information. [ 625.917754] env[61852]: Removing descriptor: 19 [ 625.955014] env[61852]: DEBUG nova.compute.manager [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 625.961250] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.050s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 625.962338] env[61852]: ERROR nova.compute.manager [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d0905238-564c-4155-9c65-582d6c76eed1, please check neutron logs for more information. [ 625.962338] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Traceback (most recent call last): [ 625.962338] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 625.962338] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] self.driver.spawn(context, instance, image_meta, [ 625.962338] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 625.962338] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] self._vmops.spawn(context, instance, image_meta, injected_files, [ 625.962338] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 625.962338] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] vm_ref = self.build_virtual_machine(instance, [ 625.962338] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 625.962338] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] vif_infos = vmwarevif.get_vif_info(self._session, [ 625.962338] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 625.962628] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] for vif in network_info: [ 625.962628] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 625.962628] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] return self._sync_wrapper(fn, *args, **kwargs) [ 625.962628] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 625.962628] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] self.wait() [ 625.962628] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 625.962628] 
env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] self[:] = self._gt.wait() [ 625.962628] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 625.962628] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] return self._exit_event.wait() [ 625.962628] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 625.962628] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] current.throw(*self._exc) [ 625.962628] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 625.962628] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] result = function(*args, **kwargs) [ 625.963074] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 625.963074] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] return func(*args, **kwargs) [ 625.963074] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 625.963074] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] raise e [ 625.963074] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 625.963074] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] nwinfo = self.network_api.allocate_for_instance( [ 625.963074] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 625.963074] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] created_port_ids = self._update_ports_for_instance( [ 625.963074] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 625.963074] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] with excutils.save_and_reraise_exception(): [ 625.963074] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 625.963074] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] self.force_reraise() [ 625.963074] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 625.963387] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] raise self.value [ 625.963387] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in 
_update_ports_for_instance [ 625.963387] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] updated_port = self._update_port( [ 625.963387] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 625.963387] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] _ensure_no_port_binding_failure(port) [ 625.963387] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 625.963387] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] raise exception.PortBindingFailed(port_id=port['id']) [ 625.963387] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] nova.exception.PortBindingFailed: Binding failed for port d0905238-564c-4155-9c65-582d6c76eed1, please check neutron logs for more information. [ 625.963387] env[61852]: ERROR nova.compute.manager [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] [ 625.964776] env[61852]: DEBUG nova.compute.utils [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Binding failed for port d0905238-564c-4155-9c65-582d6c76eed1, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 625.965286] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.205s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.969433] env[61852]: DEBUG nova.compute.manager [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Build of instance 0f6293bd-3096-4deb-a388-9a3e8b2e5926 was re-scheduled: Binding failed for port d0905238-564c-4155-9c65-582d6c76eed1, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 625.976070] env[61852]: DEBUG nova.compute.manager [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 625.976070] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Acquiring lock "refresh_cache-0f6293bd-3096-4deb-a388-9a3e8b2e5926" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 625.976070] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Acquired lock "refresh_cache-0f6293bd-3096-4deb-a388-9a3e8b2e5926" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.976070] env[61852]: DEBUG nova.network.neutron [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 625.997497] env[61852]: DEBUG nova.virt.hardware [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 625.998721] env[61852]: DEBUG nova.virt.hardware [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 625.998721] env[61852]: DEBUG nova.virt.hardware [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 625.998721] env[61852]: DEBUG nova.virt.hardware [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 
tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 625.998721] env[61852]: DEBUG nova.virt.hardware [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 625.998721] env[61852]: DEBUG nova.virt.hardware [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 625.998853] env[61852]: DEBUG nova.virt.hardware [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 625.998853] env[61852]: DEBUG nova.virt.hardware [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 625.998903] env[61852]: DEBUG nova.virt.hardware [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 626.000218] env[61852]: DEBUG nova.virt.hardware [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 626.000218] env[61852]: DEBUG nova.virt.hardware [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 626.001392] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c420d24-0a5f-45d1-95fe-65d7fe49f94f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.014996] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20d9a166-221b-4b4d-8b4f-e778f250326e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.031055] env[61852]: ERROR nova.compute.manager [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 
78f5a5c3-21b6-4553-b82b-c2b4a5d5d59d, please check neutron logs for more information. [ 626.031055] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Traceback (most recent call last): [ 626.031055] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 626.031055] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] yield resources [ 626.031055] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 626.031055] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] self.driver.spawn(context, instance, image_meta, [ 626.031055] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 626.031055] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 626.031055] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 626.031055] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] vm_ref = self.build_virtual_machine(instance, [ 626.031055] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 626.031382] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] vif_infos = vmwarevif.get_vif_info(self._session, [ 626.031382] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 626.031382] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] for vif in network_info: [ 626.031382] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 626.031382] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] return self._sync_wrapper(fn, *args, **kwargs) [ 626.031382] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 626.031382] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] self.wait() [ 626.031382] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 626.031382] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] self[:] = self._gt.wait() [ 626.031382] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 626.031382] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] return self._exit_event.wait() [ 626.031382] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 626.031382] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] current.throw(*self._exc) [ 626.031696] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 626.031696] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] result = function(*args, **kwargs) [ 626.031696] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 626.031696] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] return func(*args, **kwargs) [ 626.031696] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 626.031696] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] raise e [ 626.031696] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 626.031696] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] nwinfo = self.network_api.allocate_for_instance( [ 626.031696] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 626.031696] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] created_port_ids = self._update_ports_for_instance( [ 626.031696] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 626.031696] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] with excutils.save_and_reraise_exception(): [ 626.031696] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 626.032038] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] self.force_reraise() [ 626.032038] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 626.032038] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] raise self.value [ 626.032038] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 626.032038] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] updated_port = self._update_port( [ 626.032038] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 626.032038] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] _ensure_no_port_binding_failure(port) [ 626.032038] env[61852]: ERROR nova.compute.manager [instance: 
23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 626.032038] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] raise exception.PortBindingFailed(port_id=port['id']) [ 626.032038] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] nova.exception.PortBindingFailed: Binding failed for port 78f5a5c3-21b6-4553-b82b-c2b4a5d5d59d, please check neutron logs for more information. [ 626.032038] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] [ 626.033015] env[61852]: INFO nova.compute.manager [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Terminating instance [ 626.035228] env[61852]: DEBUG oslo_concurrency.lockutils [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Acquiring lock "refresh_cache-23f221fd-8f76-4a6f-8189-49d9be9da7e2" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 626.124271] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 626.325712] env[61852]: DEBUG nova.network.neutron [req-078752a5-7e3d-46b5-a9fa-572c2192e5e9 req-7c913700-486a-466f-99b9-19cadee01539 service nova] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 626.450054] env[61852]: DEBUG nova.network.neutron [req-078752a5-7e3d-46b5-a9fa-572c2192e5e9 req-7c913700-486a-466f-99b9-19cadee01539 service nova] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.498243] env[61852]: DEBUG nova.network.neutron [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 626.597509] env[61852]: DEBUG nova.network.neutron [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.884772] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e0a6be-bd12-45e9-b74a-c36dec22d107 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.892913] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-783dd2b8-bb2a-420e-94ac-c8ede050038c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.926531] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6392c389-206c-4024-9223-129237a02b13 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.935187] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f528abdc-542f-4586-aac4-dae86f677891 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.950201] env[61852]: DEBUG nova.compute.provider_tree [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 626.953843] env[61852]: DEBUG oslo_concurrency.lockutils [req-078752a5-7e3d-46b5-a9fa-572c2192e5e9 req-7c913700-486a-466f-99b9-19cadee01539 service nova] Releasing lock "refresh_cache-23f221fd-8f76-4a6f-8189-49d9be9da7e2" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 626.954245] env[61852]: DEBUG oslo_concurrency.lockutils [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Acquired lock "refresh_cache-23f221fd-8f76-4a6f-8189-49d9be9da7e2" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.954458] env[61852]: DEBUG nova.network.neutron [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 627.103525] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Releasing lock "refresh_cache-0f6293bd-3096-4deb-a388-9a3e8b2e5926" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 627.103525] env[61852]: DEBUG nova.compute.manager [None 
req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 627.103525] env[61852]: DEBUG nova.compute.manager [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 627.103525] env[61852]: DEBUG nova.network.neutron [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 627.118632] env[61852]: DEBUG nova.network.neutron [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 627.456036] env[61852]: DEBUG nova.scheduler.client.report [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 627.479703] env[61852]: DEBUG nova.network.neutron [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 627.622727] env[61852]: DEBUG nova.network.neutron [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.715663] env[61852]: DEBUG nova.network.neutron [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.943181] env[61852]: DEBUG nova.compute.manager [req-af612302-2c5d-433c-801f-ba4c032edce7 req-6e4d560e-202b-4684-8561-e5e1450c070e service nova] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Received event network-vif-deleted-78f5a5c3-21b6-4553-b82b-c2b4a5d5d59d {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 627.968375] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.003s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 627.969125] env[61852]: ERROR nova.compute.manager [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port fb2772c4-b414-4d3e-ad70-0798bb712b3d, please check neutron logs for more information. 
[ 627.969125] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Traceback (most recent call last): [ 627.969125] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 627.969125] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] self.driver.spawn(context, instance, image_meta, [ 627.969125] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 627.969125] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 627.969125] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 627.969125] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] vm_ref = self.build_virtual_machine(instance, [ 627.969125] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 627.969125] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] vif_infos = vmwarevif.get_vif_info(self._session, [ 627.969125] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 627.969439] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] for vif in network_info: [ 627.969439] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 627.969439] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] return self._sync_wrapper(fn, *args, **kwargs) [ 627.969439] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 627.969439] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] self.wait() [ 627.969439] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 627.969439] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] self[:] = self._gt.wait() [ 627.969439] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 627.969439] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] return self._exit_event.wait() [ 627.969439] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 627.969439] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] result = hub.switch() [ 627.969439] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
627.969439] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] return self.greenlet.switch() [ 627.969762] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 627.969762] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] result = function(*args, **kwargs) [ 627.969762] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 627.969762] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] return func(*args, **kwargs) [ 627.969762] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 627.969762] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] raise e [ 627.969762] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 627.969762] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] nwinfo = self.network_api.allocate_for_instance( [ 627.969762] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 627.969762] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] created_port_ids = self._update_ports_for_instance( [ 627.969762] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 627.969762] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] with excutils.save_and_reraise_exception(): [ 627.969762] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 627.970070] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] self.force_reraise() [ 627.970070] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 627.970070] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] raise self.value [ 627.970070] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 627.970070] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] updated_port = self._update_port( [ 627.970070] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 627.970070] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] _ensure_no_port_binding_failure(port) [ 627.970070] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 627.970070] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] raise exception.PortBindingFailed(port_id=port['id']) [ 627.970070] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] nova.exception.PortBindingFailed: Binding failed for port fb2772c4-b414-4d3e-ad70-0798bb712b3d, please check neutron logs for more information. [ 627.970070] env[61852]: ERROR nova.compute.manager [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] [ 627.970319] env[61852]: DEBUG nova.compute.utils [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Binding failed for port fb2772c4-b414-4d3e-ad70-0798bb712b3d, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 627.970920] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.581s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 627.973879] env[61852]: DEBUG nova.compute.manager [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Build of instance 068ced45-4c50-4cfd-bd94-fa1dad29e5b5 was re-scheduled: Binding failed for port fb2772c4-b414-4d3e-ad70-0798bb712b3d, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 627.974330] env[61852]: DEBUG nova.compute.manager [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 627.974559] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Acquiring lock "refresh_cache-068ced45-4c50-4cfd-bd94-fa1dad29e5b5" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 627.974702] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Acquired lock "refresh_cache-068ced45-4c50-4cfd-bd94-fa1dad29e5b5" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.974897] env[61852]: DEBUG nova.network.neutron [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 628.126534] env[61852]: INFO nova.compute.manager [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 0f6293bd-3096-4deb-a388-9a3e8b2e5926] Took 1.02 seconds to deallocate network for instance. [ 628.218641] env[61852]: DEBUG oslo_concurrency.lockutils [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Releasing lock "refresh_cache-23f221fd-8f76-4a6f-8189-49d9be9da7e2" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 628.219231] env[61852]: DEBUG nova.compute.manager [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 628.219231] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 628.219606] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4e1996eb-8558-4480-aec3-3cfff5ae6a89 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.231860] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc852e5-4d7e-4916-8ee4-db9130c351b9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.257384] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 23f221fd-8f76-4a6f-8189-49d9be9da7e2 could not be found. [ 628.257476] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 628.257606] env[61852]: INFO nova.compute.manager [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Took 0.04 seconds to destroy the instance on the hypervisor. [ 628.257831] env[61852]: DEBUG oslo.service.loopingcall [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 628.258064] env[61852]: DEBUG nova.compute.manager [-] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 628.258147] env[61852]: DEBUG nova.network.neutron [-] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 628.280975] env[61852]: DEBUG nova.network.neutron [-] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 628.509577] env[61852]: DEBUG nova.network.neutron [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 628.641172] env[61852]: DEBUG nova.network.neutron [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.783337] env[61852]: DEBUG nova.network.neutron [-] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.893636] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b1ca77-a7d1-4174-95a8-0c68e2dba74f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.902890] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc141ec3-f7c2-4359-934c-3d927044f5b2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.933573] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fd4f1a8-6f5f-4c2a-b9a6-bc2594336697 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.947285] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f25393d-601a-4c85-8572-de1bc268d163 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.961787] env[61852]: DEBUG nova.compute.provider_tree [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 629.146834] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Releasing lock "refresh_cache-068ced45-4c50-4cfd-bd94-fa1dad29e5b5" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 629.147085] env[61852]: DEBUG nova.compute.manager [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 629.147268] env[61852]: DEBUG nova.compute.manager [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 629.147433] env[61852]: DEBUG nova.network.neutron [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 629.166713] env[61852]: DEBUG nova.network.neutron [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 629.175242] env[61852]: INFO nova.scheduler.client.report [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Deleted allocations for instance 0f6293bd-3096-4deb-a388-9a3e8b2e5926 [ 629.288113] env[61852]: INFO nova.compute.manager [-] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Took 1.03 seconds to deallocate network for instance. [ 629.288210] env[61852]: DEBUG nova.compute.claims [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 629.288971] env[61852]: DEBUG oslo_concurrency.lockutils [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.464697] env[61852]: DEBUG nova.scheduler.client.report [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 629.669879] env[61852]: DEBUG nova.network.neutron [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 629.683349] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ad4d932f-bf15-40e0-8135-e25d1b1be98e tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Lock "0f6293bd-3096-4deb-a388-9a3e8b2e5926" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.262s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 629.725324] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Acquiring lock "aae42775-cb43-4eee-967a-9ba0bdde7783" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.725556] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Lock "aae42775-cb43-4eee-967a-9ba0bdde7783" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 629.970175] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.999s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 629.970826] env[61852]: ERROR nova.compute.manager [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 70b22784-bf3f-4e3e-a446-065ef48c6697, please check neutron logs for more information. 
[ 629.970826] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Traceback (most recent call last): [ 629.970826] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 629.970826] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] self.driver.spawn(context, instance, image_meta, [ 629.970826] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 629.970826] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 629.970826] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 629.970826] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] vm_ref = self.build_virtual_machine(instance, [ 629.970826] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 629.970826] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] vif_infos = vmwarevif.get_vif_info(self._session, [ 629.970826] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 629.971190] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] for vif in network_info: [ 629.971190] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 629.971190] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] return self._sync_wrapper(fn, *args, **kwargs) [ 629.971190] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 629.971190] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] self.wait() [ 629.971190] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 629.971190] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] self[:] = self._gt.wait() [ 629.971190] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 629.971190] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] return self._exit_event.wait() [ 629.971190] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 629.971190] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] result = hub.switch() [ 629.971190] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
629.971190] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] return self.greenlet.switch() [ 629.971531] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 629.971531] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] result = function(*args, **kwargs) [ 629.971531] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 629.971531] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] return func(*args, **kwargs) [ 629.971531] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 629.971531] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] raise e [ 629.971531] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 629.971531] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] nwinfo = self.network_api.allocate_for_instance( [ 629.971531] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 629.971531] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] created_port_ids = self._update_ports_for_instance( [ 629.971531] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 629.971531] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] with excutils.save_and_reraise_exception(): [ 629.971531] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 629.971883] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] self.force_reraise() [ 629.971883] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 629.971883] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] raise self.value [ 629.971883] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 629.971883] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] updated_port = self._update_port( [ 629.971883] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 629.971883] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] _ensure_no_port_binding_failure(port) [ 629.971883] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 629.971883] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] raise exception.PortBindingFailed(port_id=port['id']) [ 629.971883] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] nova.exception.PortBindingFailed: Binding failed for port 70b22784-bf3f-4e3e-a446-065ef48c6697, please check neutron logs for more information. [ 629.971883] env[61852]: ERROR nova.compute.manager [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] [ 629.972407] env[61852]: DEBUG nova.compute.utils [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Binding failed for port 70b22784-bf3f-4e3e-a446-065ef48c6697, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 629.973969] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 18.068s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 629.975443] env[61852]: DEBUG nova.compute.manager [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Build of instance 8c872e97-44ca-48c9-b7bb-02dca695ad8a was re-scheduled: Binding failed for port 70b22784-bf3f-4e3e-a446-065ef48c6697, please check neutron logs for more information. {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 629.975874] env[61852]: DEBUG nova.compute.manager [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 629.976206] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Acquiring lock "refresh_cache-8c872e97-44ca-48c9-b7bb-02dca695ad8a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 629.976239] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Acquired lock "refresh_cache-8c872e97-44ca-48c9-b7bb-02dca695ad8a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.977159] env[61852]: DEBUG nova.network.neutron [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 630.174555] env[61852]: INFO nova.compute.manager [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] [instance: 
068ced45-4c50-4cfd-bd94-fa1dad29e5b5] Took 1.02 seconds to deallocate network for instance. [ 630.188152] env[61852]: DEBUG nova.compute.manager [None req-3123578c-b306-4815-8852-60a87e7ff699 tempest-ServersListShow296Test-1500905678 tempest-ServersListShow296Test-1500905678-project-member] [instance: 39cd0e07-1378-40ae-a406-90c77df15146] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 630.500393] env[61852]: DEBUG nova.network.neutron [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 630.550582] env[61852]: DEBUG nova.network.neutron [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.694866] env[61852]: DEBUG nova.compute.manager [None req-3123578c-b306-4815-8852-60a87e7ff699 tempest-ServersListShow296Test-1500905678 tempest-ServersListShow296Test-1500905678-project-member] [instance: 39cd0e07-1378-40ae-a406-90c77df15146] Instance disappeared before build. {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 631.052828] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Releasing lock "refresh_cache-8c872e97-44ca-48c9-b7bb-02dca695ad8a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 631.053106] env[61852]: DEBUG nova.compute.manager [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 631.053243] env[61852]: DEBUG nova.compute.manager [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 631.053409] env[61852]: DEBUG nova.network.neutron [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 631.070869] env[61852]: DEBUG nova.network.neutron [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 631.207477] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3123578c-b306-4815-8852-60a87e7ff699 tempest-ServersListShow296Test-1500905678 tempest-ServersListShow296Test-1500905678-project-member] Lock "39cd0e07-1378-40ae-a406-90c77df15146" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.248s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 631.209626] env[61852]: INFO nova.scheduler.client.report [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Deleted allocations for instance 068ced45-4c50-4cfd-bd94-fa1dad29e5b5 [ 631.511018] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 068ced45-4c50-4cfd-bd94-fa1dad29e5b5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 631.574516] env[61852]: DEBUG nova.network.neutron [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.716542] env[61852]: DEBUG nova.compute.manager [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 631.720199] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e1b31b8-a9aa-4d6c-ad7a-5591cd291280 tempest-ServerRescueNegativeTestJSON-2071421526 tempest-ServerRescueNegativeTestJSON-2071421526-project-member] Lock "068ced45-4c50-4cfd-bd94-fa1dad29e5b5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.465s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 632.018661] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 8c872e97-44ca-48c9-b7bb-02dca695ad8a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 632.018661] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 5d89c8de-69f9-432d-bb64-46d662097463 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 632.018661] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance ab92661d-d5e3-4e7a-b6c3-48d48bf795b3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 632.018661] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 23f221fd-8f76-4a6f-8189-49d9be9da7e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 632.078350] env[61852]: INFO nova.compute.manager [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] [instance: 8c872e97-44ca-48c9-b7bb-02dca695ad8a] Took 1.02 seconds to deallocate network for instance. [ 632.224153] env[61852]: DEBUG nova.compute.manager [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 632.247105] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 632.521155] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 26aba610-746f-4a3c-988c-bf5ffa44198f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 632.766013] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 633.025388] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 97c37446-5b86-469a-9b9b-751d0ebea463 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 633.113401] env[61852]: INFO nova.scheduler.client.report [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Deleted allocations for instance 8c872e97-44ca-48c9-b7bb-02dca695ad8a [ 633.528720] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 90251da7-072c-45ff-899b-3fd2e0c06880 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 633.624509] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6c24a21e-24c2-4c9a-92b7-a236904cf5e0 tempest-MigrationsAdminTest-1949443979 tempest-MigrationsAdminTest-1949443979-project-member] Lock "8c872e97-44ca-48c9-b7bb-02dca695ad8a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.386s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 634.033136] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 394a7258-a9e0-4b16-a125-01e8cdfe7026 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 634.132558] env[61852]: DEBUG nova.compute.manager [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 634.537827] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 634.665361] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 635.040939] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance b566ea57-9b1a-4869-be7c-9ba579db25dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 635.546972] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 0b213475-347e-42c9-aa16-0abd570d1a3e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 636.049855] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 144d5486-d438-4bca-9b68-c414cc1f4659 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 636.554343] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance f9e90a57-da19-4b1a-81cb-8a6433e09785 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 637.058432] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 593106da-0c81-448a-b3ba-fd6007dcdd98 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 637.562124] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 48b40da3-1efc-4557-a791-e88158338aec has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 638.067033] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 29cb49fe-627a-4f0f-919b-58f764cd63d0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 638.569851] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance beffa800-ff93-4230-be14-f2b906666cc0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 639.072899] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 639.576651] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 694889e8-200e-454c-9e87-60521dd044d9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 640.081061] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 640.585940] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance a77ddc8b-f3b2-4e13-944d-5cafecf59fae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 641.090594] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 5992f657-c29e-4da5-98f1-286a384ca0cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 641.593874] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance d7ca3eac-9738-483a-ae14-67e17929a251 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 642.102289] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance bd549d69-403b-4c5c-9e08-0c84d32a7c0a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 642.606208] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance aae42775-cb43-4eee-967a-9ba0bdde7783 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 642.606690] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=61852) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 642.606901] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=61852) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 642.890552] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-426b0459-3989-4373-9446-d72e76a76339 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.903679] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c61017e-d00d-4596-b8ae-d8866e8a0728 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.932882] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f84d8456-4c6c-46b0-949e-ca9067b2ee42 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.940779] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16040bc4-bf3d-420b-823f-99c136cb7643 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.953470] env[61852]: DEBUG nova.compute.provider_tree [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 643.455935] env[61852]: DEBUG nova.scheduler.client.report [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 643.961963] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61852) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 643.962337] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 13.989s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 643.962461] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.328s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 643.964046] env[61852]: INFO nova.compute.claims [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 645.322645] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-518d75c5-2ce9-4e5f-a1cd-2d789dbd1173 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.331990] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e1e7de-be8d-4c22-9e79-35c3fae4698a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.361945] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de61f5ac-1218-4646-bca9-dd76128e2278 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.369168] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40eff0e3-d7d8-486c-adab-7f8f9a482a80 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.387766] env[61852]: DEBUG nova.compute.provider_tree [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 645.891183] env[61852]: DEBUG nova.scheduler.client.report [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 646.396201] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 
tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.434s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 646.396894] env[61852]: DEBUG nova.compute.manager [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 646.399750] env[61852]: DEBUG oslo_concurrency.lockutils [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.772s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 646.401741] env[61852]: INFO nova.compute.claims [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 646.908068] env[61852]: DEBUG nova.compute.utils [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 646.910150] env[61852]: DEBUG nova.compute.manager [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 646.910493] env[61852]: DEBUG nova.network.neutron [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 646.958697] env[61852]: DEBUG nova.policy [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0f04d129452d4eb79514c52a6972af0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e93a6965a6884292bc56b01f7d54a622', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 647.221048] env[61852]: DEBUG nova.network.neutron [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Successfully created port: ab0e2e07-d739-4bc9-8bbb-a86553a47ada {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 647.411361] env[61852]: DEBUG nova.compute.manager [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 647.749216] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43305153-b2da-4aa7-9e70-2c272c286133 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.757788] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d203ba05-3893-4ec1-aaa0-77ce9aff0668 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.794327] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a36186b8-1bc7-4207-8085-c5b8aa91839a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.802059] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ba7bed-0627-44f0-a034-dd3f774678a8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.815310] env[61852]: DEBUG nova.compute.provider_tree [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 647.918248] env[61852]: DEBUG nova.compute.manager [req-7270e643-e89f-4be2-862b-4a5c8412c13f req-8feb56ef-f3e0-46e8-850d-ffcc4d161c14 service nova] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Received event network-changed-ab0e2e07-d739-4bc9-8bbb-a86553a47ada {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 647.918426] env[61852]: DEBUG nova.compute.manager [req-7270e643-e89f-4be2-862b-4a5c8412c13f req-8feb56ef-f3e0-46e8-850d-ffcc4d161c14 service nova] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Refreshing instance network info cache due to event network-changed-ab0e2e07-d739-4bc9-8bbb-a86553a47ada. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 647.918652] env[61852]: DEBUG oslo_concurrency.lockutils [req-7270e643-e89f-4be2-862b-4a5c8412c13f req-8feb56ef-f3e0-46e8-850d-ffcc4d161c14 service nova] Acquiring lock "refresh_cache-26aba610-746f-4a3c-988c-bf5ffa44198f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 647.918803] env[61852]: DEBUG oslo_concurrency.lockutils [req-7270e643-e89f-4be2-862b-4a5c8412c13f req-8feb56ef-f3e0-46e8-850d-ffcc4d161c14 service nova] Acquired lock "refresh_cache-26aba610-746f-4a3c-988c-bf5ffa44198f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.918958] env[61852]: DEBUG nova.network.neutron [req-7270e643-e89f-4be2-862b-4a5c8412c13f req-8feb56ef-f3e0-46e8-850d-ffcc4d161c14 service nova] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Refreshing network info cache for port ab0e2e07-d739-4bc9-8bbb-a86553a47ada {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 648.126674] env[61852]: ERROR nova.compute.manager [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ab0e2e07-d739-4bc9-8bbb-a86553a47ada, please check neutron logs for more information. [ 648.126674] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 648.126674] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 648.126674] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 648.126674] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 648.126674] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 648.126674] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 648.126674] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 648.126674] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 648.126674] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 648.126674] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 648.126674] env[61852]: ERROR nova.compute.manager raise self.value [ 648.126674] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 648.126674] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 648.126674] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 648.126674] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 648.127119] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 648.127119] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 648.127119] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: 
Binding failed for port ab0e2e07-d739-4bc9-8bbb-a86553a47ada, please check neutron logs for more information. [ 648.127119] env[61852]: ERROR nova.compute.manager [ 648.127119] env[61852]: Traceback (most recent call last): [ 648.127119] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 648.127119] env[61852]: listener.cb(fileno) [ 648.127119] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 648.127119] env[61852]: result = function(*args, **kwargs) [ 648.127119] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 648.127119] env[61852]: return func(*args, **kwargs) [ 648.127119] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 648.127119] env[61852]: raise e [ 648.127119] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 648.127119] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 648.127119] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 648.127119] env[61852]: created_port_ids = self._update_ports_for_instance( [ 648.127119] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 648.127119] env[61852]: with excutils.save_and_reraise_exception(): [ 648.127119] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 648.127119] env[61852]: self.force_reraise() [ 648.127119] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 648.127119] env[61852]: raise self.value [ 648.127119] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 648.127119] env[61852]: updated_port = self._update_port( [ 648.127119] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 648.127119] env[61852]: _ensure_no_port_binding_failure(port) [ 648.127119] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 648.127119] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 648.128045] env[61852]: nova.exception.PortBindingFailed: Binding failed for port ab0e2e07-d739-4bc9-8bbb-a86553a47ada, please check neutron logs for more information. 
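The traceback above bottoms out at nova/network/neutron.py:294, where a port whose binding failed on the Neutron side is converted into PortBindingFailed. As a reading aid, here is a minimal self-contained sketch of that check; the exception class is a local stand-in for nova.exception.PortBindingFailed, and the real nova code differs in detail:

```python
# Local stand-in for nova.exception.PortBindingFailed (assumption: the real
# class is richer; the message mirrors the ERROR records above).
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, "
                         "please check neutron logs for more information.")

VIF_TYPE_BINDING_FAILED = "binding_failed"  # Neutron's failure marker

def ensure_no_port_binding_failure(port):
    # Mirrors the _ensure_no_port_binding_failure frame at neutron.py:294:
    # a port dict whose binding:vif_type is 'binding_failed' aborts the build.
    if port.get("binding:vif_type") == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port["id"])

# Example: the payload Neutron would return for the port in this run.
ensure_no_port_binding_failure(
    {"id": "ab0e2e07-d739-4bc9-8bbb-a86553a47ada",
     "binding:vif_type": "binding_failed"})  # raises PortBindingFailed
```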
[ 648.128045] env[61852]: Removing descriptor: 19 [ 648.318038] env[61852]: DEBUG nova.scheduler.client.report [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 648.430312] env[61852]: DEBUG nova.compute.manager [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 648.437145] env[61852]: DEBUG nova.network.neutron [req-7270e643-e89f-4be2-862b-4a5c8412c13f req-8feb56ef-f3e0-46e8-850d-ffcc4d161c14 service nova] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 648.465748] env[61852]: DEBUG nova.virt.hardware [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 648.465989] env[61852]: DEBUG nova.virt.hardware [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 648.466162] env[61852]: DEBUG nova.virt.hardware [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 648.466345] env[61852]: DEBUG nova.virt.hardware [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 648.466489] env[61852]: DEBUG 
nova.virt.hardware [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 648.466724] env[61852]: DEBUG nova.virt.hardware [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 648.467154] env[61852]: DEBUG nova.virt.hardware [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 648.467251] env[61852]: DEBUG nova.virt.hardware [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 648.467424] env[61852]: DEBUG nova.virt.hardware [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 648.467659] env[61852]: DEBUG nova.virt.hardware [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 648.467862] env[61852]: DEBUG nova.virt.hardware [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 648.468730] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d32407e1-484e-43a9-9ed8-666ed1e172b9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.478793] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-586d68db-36f2-4212-96ef-b898e42d4302 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.492903] env[61852]: ERROR nova.compute.manager [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ab0e2e07-d739-4bc9-8bbb-a86553a47ada, please check neutron logs for more information. 
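Note how the failure from the background allocation resurfaces at spawn time: spawn iterates network_info, and the _sync_wrapper/wait frames (model.py:603-635 in the trace that follows) force the allocation greenthread and re-raise its exception. A simplified stand-in for that deferred-result pattern, not the actual nova.network.model code:

```python
import eventlet

class AsyncNetworkInfo(list):
    """Deferred network_info filled in by a background greenthread.

    Sketch of the pattern in the model.py frames; the real
    NetworkInfoAsyncWrapper carries more machinery.
    """

    def __init__(self, allocate_fn, *args, **kwargs):
        super().__init__()
        # allocate_fn must return a list of VIFs; any exception it raises
        # is stored in the greenthread until someone waits on it.
        self._gt = eventlet.spawn(allocate_fn, *args, **kwargs)

    def wait(self):
        # GreenThread.wait() re-raises whatever the allocation raised,
        # e.g. PortBindingFailed -- which is why spawn() fails here.
        self[:] = self._gt.wait()
        return self

    def __iter__(self):
        self.wait()  # first iteration blocks on, and may re-raise from, the GT
        return super().__iter__()
```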
[ 648.492903] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Traceback (most recent call last): [ 648.492903] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 648.492903] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] yield resources [ 648.492903] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 648.492903] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] self.driver.spawn(context, instance, image_meta, [ 648.492903] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 648.492903] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 648.492903] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 648.492903] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] vm_ref = self.build_virtual_machine(instance, [ 648.492903] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 648.493535] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] vif_infos = vmwarevif.get_vif_info(self._session, [ 648.493535] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 648.493535] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] for vif in network_info: [ 648.493535] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 648.493535] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] return self._sync_wrapper(fn, *args, **kwargs) [ 648.493535] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 648.493535] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] self.wait() [ 648.493535] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 648.493535] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] self[:] = self._gt.wait() [ 648.493535] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 648.493535] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] return self._exit_event.wait() [ 648.493535] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 648.493535] env[61852]: ERROR 
nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] current.throw(*self._exc) [ 648.494107] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 648.494107] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] result = function(*args, **kwargs) [ 648.494107] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 648.494107] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] return func(*args, **kwargs) [ 648.494107] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 648.494107] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] raise e [ 648.494107] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 648.494107] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] nwinfo = self.network_api.allocate_for_instance( [ 648.494107] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 648.494107] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] created_port_ids = self._update_ports_for_instance( [ 648.494107] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 648.494107] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] with excutils.save_and_reraise_exception(): [ 648.494107] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 648.494646] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] self.force_reraise() [ 648.494646] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 648.494646] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] raise self.value [ 648.494646] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 648.494646] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] updated_port = self._update_port( [ 648.494646] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 648.494646] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] _ensure_no_port_binding_failure(port) [ 648.494646] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
648.494646] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] raise exception.PortBindingFailed(port_id=port['id']) [ 648.494646] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] nova.exception.PortBindingFailed: Binding failed for port ab0e2e07-d739-4bc9-8bbb-a86553a47ada, please check neutron logs for more information. [ 648.494646] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] [ 648.494646] env[61852]: INFO nova.compute.manager [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Terminating instance [ 648.495954] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "refresh_cache-26aba610-746f-4a3c-988c-bf5ffa44198f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 648.514786] env[61852]: DEBUG nova.network.neutron [req-7270e643-e89f-4be2-862b-4a5c8412c13f req-8feb56ef-f3e0-46e8-850d-ffcc4d161c14 service nova] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.824254] env[61852]: DEBUG oslo_concurrency.lockutils [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.423s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 648.824254] env[61852]: DEBUG nova.compute.manager [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 648.829838] env[61852]: DEBUG oslo_concurrency.lockutils [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.203s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.832955] env[61852]: INFO nova.compute.claims [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 649.016983] env[61852]: DEBUG oslo_concurrency.lockutils [req-7270e643-e89f-4be2-862b-4a5c8412c13f req-8feb56ef-f3e0-46e8-850d-ffcc4d161c14 service nova] Releasing lock "refresh_cache-26aba610-746f-4a3c-988c-bf5ffa44198f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 649.017429] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquired lock "refresh_cache-26aba610-746f-4a3c-988c-bf5ffa44198f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.017623] env[61852]: DEBUG nova.network.neutron [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 649.335971] env[61852]: DEBUG nova.compute.utils [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 649.340254] env[61852]: DEBUG nova.compute.manager [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 649.340254] env[61852]: DEBUG nova.network.neutron [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 649.395656] env[61852]: DEBUG nova.policy [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '81d125f065c14e5b8cac15b8962b736f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '54956a8089e546438e7ab02f9dac85a0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 649.540915] env[61852]: DEBUG nova.network.neutron [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 649.657812] env[61852]: DEBUG nova.network.neutron [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 649.695924] env[61852]: DEBUG nova.network.neutron [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Successfully created port: 7f77d04e-4464-4f53-b82b-bd1dca0bdfbe {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 649.840736] env[61852]: DEBUG nova.compute.manager [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 650.162306] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Releasing lock "refresh_cache-26aba610-746f-4a3c-988c-bf5ffa44198f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 650.162419] env[61852]: DEBUG nova.compute.manager [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 650.162723] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 650.167777] env[61852]: DEBUG nova.compute.manager [req-2522fcc3-15bc-433d-91fa-dc4128a0e49c req-965edcd1-ec6b-4f89-9dec-e321805b7cd8 service nova] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Received event network-vif-deleted-ab0e2e07-d739-4bc9-8bbb-a86553a47ada {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 650.167777] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-87d5dfa1-c9a8-4675-a732-6aa946b9642e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.185175] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce93ac33-6502-463d-bdfe-0df7373b2d62 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.213313] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 26aba610-746f-4a3c-988c-bf5ffa44198f could not be found. [ 650.213313] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 650.213313] env[61852]: INFO nova.compute.manager [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Took 0.05 seconds to destroy the instance on the hypervisor. [ 650.213596] env[61852]: DEBUG oslo.service.loopingcall [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 650.219203] env[61852]: DEBUG nova.compute.manager [-] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 650.219203] env[61852]: DEBUG nova.network.neutron [-] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 650.247354] env[61852]: DEBUG nova.network.neutron [-] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 650.273166] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f257d587-afe8-4ba8-af97-3696935ee246 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.283596] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c403e4a-1477-4c18-bd40-9bd7d316b527 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.317497] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e696c67-3604-4deb-99e4-95104f0c90c6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.326108] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0195e2b9-f016-4af9-8485-082e2be1bd0f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.340046] env[61852]: DEBUG nova.compute.provider_tree [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 650.685783] env[61852]: ERROR nova.compute.manager [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7f77d04e-4464-4f53-b82b-bd1dca0bdfbe, please check neutron logs for more information. 
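The same binding failure now hits the second build; "failed network setup after 1 attempt(s)" corresponds to the retry loop in the manager.py:1989-2011 frames. A hedged sketch of that loop's shape follows; the attempt budget and backoff are simplified assumptions, not the exact nova logic:

```python
import time

def allocate_network_with_retries(network_api, context, instance, attempts=1):
    # Shape of _allocate_network_async as seen in the frames above:
    # try allocate_for_instance up to `attempts` times, then re-raise,
    # producing the "failed network setup after N attempt(s)" ERROR record.
    for attempt in range(1, attempts + 1):
        try:
            return network_api.allocate_for_instance(context, instance)
        except Exception:
            if attempt == attempts:
                raise
            time.sleep(attempt)  # assumed simple backoff between retries
```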
[ 650.685783] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 650.685783] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 650.685783] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 650.685783] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 650.685783] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 650.685783] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 650.685783] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 650.685783] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 650.685783] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 650.685783] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 650.685783] env[61852]: ERROR nova.compute.manager raise self.value [ 650.685783] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 650.685783] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 650.685783] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 650.685783] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 650.686783] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 650.686783] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 650.686783] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7f77d04e-4464-4f53-b82b-bd1dca0bdfbe, please check neutron logs for more information. 
[ 650.686783] env[61852]: ERROR nova.compute.manager [ 650.686783] env[61852]: Traceback (most recent call last): [ 650.686783] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 650.686783] env[61852]: listener.cb(fileno) [ 650.686783] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 650.686783] env[61852]: result = function(*args, **kwargs) [ 650.686783] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 650.686783] env[61852]: return func(*args, **kwargs) [ 650.686783] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 650.686783] env[61852]: raise e [ 650.686783] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 650.686783] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 650.686783] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 650.686783] env[61852]: created_port_ids = self._update_ports_for_instance( [ 650.686783] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 650.686783] env[61852]: with excutils.save_and_reraise_exception(): [ 650.686783] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 650.686783] env[61852]: self.force_reraise() [ 650.686783] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 650.686783] env[61852]: raise self.value [ 650.686783] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 650.686783] env[61852]: updated_port = self._update_port( [ 650.686783] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 650.686783] env[61852]: _ensure_no_port_binding_failure(port) [ 650.686783] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 650.686783] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 650.687550] env[61852]: nova.exception.PortBindingFailed: Binding failed for port 7f77d04e-4464-4f53-b82b-bd1dca0bdfbe, please check neutron logs for more information. 
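[editor's annotation] Both tracebacks above terminate in _ensure_no_port_binding_failure (nova/network/neutron.py:294) raising PortBindingFailed once Neutron reports the binding as failed. A minimal self-contained sketch of that guard, assuming (this is not shown in the log itself) that Neutron signals the failure through the port's 'binding:vif_type' field:

# Sketch of the guard at the bottom of the tracebacks above.
# Assumption: a failed binding appears as binding:vif_type == 'binding_failed'.
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, "
                         "please check neutron logs for more information.")

def _ensure_no_port_binding_failure(port):
    # Raise as soon as Neutron marks the port's binding as failed.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

try:
    _ensure_no_port_binding_failure(
        {'id': '7f77d04e-4464-4f53-b82b-bd1dca0bdfbe',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)  # same message as the ERROR records above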
[ 650.687550] env[61852]: Removing descriptor: 19 [ 650.704702] env[61852]: DEBUG oslo_concurrency.lockutils [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Acquiring lock "b0e0fcf9-1630-49aa-b053-5498245313b0" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.704937] env[61852]: DEBUG oslo_concurrency.lockutils [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Lock "b0e0fcf9-1630-49aa-b053-5498245313b0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.750715] env[61852]: DEBUG nova.network.neutron [-] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.843035] env[61852]: DEBUG nova.scheduler.client.report [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 650.859852] env[61852]: DEBUG nova.compute.manager [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 650.887203] env[61852]: DEBUG nova.virt.hardware [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 650.887458] env[61852]: DEBUG nova.virt.hardware [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 650.889050] env[61852]: DEBUG nova.virt.hardware [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 650.889050] env[61852]: DEBUG nova.virt.hardware [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 650.889050] env[61852]: DEBUG nova.virt.hardware [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 650.889138] env[61852]: DEBUG nova.virt.hardware [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 650.889419] env[61852]: DEBUG nova.virt.hardware [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 650.889628] env[61852]: DEBUG nova.virt.hardware [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 650.889741] env[61852]: DEBUG 
nova.virt.hardware [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 650.889903] env[61852]: DEBUG nova.virt.hardware [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 650.890089] env[61852]: DEBUG nova.virt.hardware [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 650.891071] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6253476c-d6eb-4811-943e-2875dd2098b2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.901509] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b69444a-3b06-4523-b218-6d63323a2ba5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.915356] env[61852]: ERROR nova.compute.manager [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7f77d04e-4464-4f53-b82b-bd1dca0bdfbe, please check neutron logs for more information. 
[ 650.915356] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Traceback (most recent call last): [ 650.915356] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 650.915356] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] yield resources [ 650.915356] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 650.915356] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] self.driver.spawn(context, instance, image_meta, [ 650.915356] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 650.915356] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] self._vmops.spawn(context, instance, image_meta, injected_files, [ 650.915356] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 650.915356] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] vm_ref = self.build_virtual_machine(instance, [ 650.915356] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 650.916329] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] vif_infos = vmwarevif.get_vif_info(self._session, [ 650.916329] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 650.916329] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] for vif in network_info: [ 650.916329] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 650.916329] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] return self._sync_wrapper(fn, *args, **kwargs) [ 650.916329] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 650.916329] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] self.wait() [ 650.916329] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 650.916329] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] self[:] = self._gt.wait() [ 650.916329] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 650.916329] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] return self._exit_event.wait() [ 650.916329] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 650.916329] env[61852]: ERROR 
nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] current.throw(*self._exc) [ 650.917262] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 650.917262] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] result = function(*args, **kwargs) [ 650.917262] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 650.917262] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] return func(*args, **kwargs) [ 650.917262] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 650.917262] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] raise e [ 650.917262] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 650.917262] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] nwinfo = self.network_api.allocate_for_instance( [ 650.917262] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 650.917262] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] created_port_ids = self._update_ports_for_instance( [ 650.917262] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 650.917262] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] with excutils.save_and_reraise_exception(): [ 650.917262] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 650.917604] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] self.force_reraise() [ 650.917604] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 650.917604] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] raise self.value [ 650.917604] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 650.917604] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] updated_port = self._update_port( [ 650.917604] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 650.917604] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] _ensure_no_port_binding_failure(port) [ 650.917604] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
650.917604] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] raise exception.PortBindingFailed(port_id=port['id']) [ 650.917604] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] nova.exception.PortBindingFailed: Binding failed for port 7f77d04e-4464-4f53-b82b-bd1dca0bdfbe, please check neutron logs for more information. [ 650.917604] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] [ 650.917604] env[61852]: INFO nova.compute.manager [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Terminating instance [ 650.918067] env[61852]: DEBUG oslo_concurrency.lockutils [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Acquiring lock "refresh_cache-97c37446-5b86-469a-9b9b-751d0ebea463" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 650.918884] env[61852]: DEBUG oslo_concurrency.lockutils [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Acquired lock "refresh_cache-97c37446-5b86-469a-9b9b-751d0ebea463" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.918884] env[61852]: DEBUG nova.network.neutron [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 651.253630] env[61852]: INFO nova.compute.manager [-] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Took 1.04 seconds to deallocate network for instance. [ 651.258112] env[61852]: DEBUG nova.compute.claims [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 651.258287] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 651.349521] env[61852]: DEBUG oslo_concurrency.lockutils [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.520s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 651.349989] env[61852]: DEBUG nova.compute.manager [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 651.352615] env[61852]: DEBUG oslo_concurrency.lockutils [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 31.011s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 651.436291] env[61852]: DEBUG nova.network.neutron [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 651.647737] env[61852]: DEBUG nova.network.neutron [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.856810] env[61852]: DEBUG nova.compute.utils [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 651.861332] env[61852]: DEBUG nova.compute.manager [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 651.861665] env[61852]: DEBUG nova.network.neutron [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 651.904316] env[61852]: DEBUG nova.policy [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '06f3a03224034e6a84d2f88b5eb793ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b986f7e1f61b4148ad439f84e0ce28a6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 652.153921] env[61852]: DEBUG oslo_concurrency.lockutils [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Releasing lock "refresh_cache-97c37446-5b86-469a-9b9b-751d0ebea463" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 652.153921] env[61852]: DEBUG nova.compute.manager [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 652.154068] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 652.155166] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d1ca9654-83ca-4c95-a119-7a996e97661e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.167167] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8539dc7-cdd3-4961-8846-c01b4aa7e17a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.179148] env[61852]: DEBUG nova.network.neutron [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Successfully created port: f6966f62-2f69-4005-9d7d-23badccf71d5 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 652.199633] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 97c37446-5b86-469a-9b9b-751d0ebea463 could not be found. [ 652.200778] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 652.200778] env[61852]: INFO nova.compute.manager [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Took 0.05 seconds to destroy the instance on the hypervisor. [ 652.200778] env[61852]: DEBUG oslo.service.loopingcall [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 652.200778] env[61852]: DEBUG nova.compute.manager [-] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 652.200778] env[61852]: DEBUG nova.network.neutron [-] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 652.215893] env[61852]: DEBUG nova.compute.manager [req-47e30968-9e4a-4a72-8716-bf395c46d9fa req-93e32afd-5628-4f10-9be6-8346a4f2c59e service nova] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Received event network-changed-7f77d04e-4464-4f53-b82b-bd1dca0bdfbe {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 652.216187] env[61852]: DEBUG nova.compute.manager [req-47e30968-9e4a-4a72-8716-bf395c46d9fa req-93e32afd-5628-4f10-9be6-8346a4f2c59e service nova] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Refreshing instance network info cache due to event network-changed-7f77d04e-4464-4f53-b82b-bd1dca0bdfbe. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 652.216450] env[61852]: DEBUG oslo_concurrency.lockutils [req-47e30968-9e4a-4a72-8716-bf395c46d9fa req-93e32afd-5628-4f10-9be6-8346a4f2c59e service nova] Acquiring lock "refresh_cache-97c37446-5b86-469a-9b9b-751d0ebea463" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 652.216763] env[61852]: DEBUG oslo_concurrency.lockutils [req-47e30968-9e4a-4a72-8716-bf395c46d9fa req-93e32afd-5628-4f10-9be6-8346a4f2c59e service nova] Acquired lock "refresh_cache-97c37446-5b86-469a-9b9b-751d0ebea463" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.216989] env[61852]: DEBUG nova.network.neutron [req-47e30968-9e4a-4a72-8716-bf395c46d9fa req-93e32afd-5628-4f10-9be6-8346a4f2c59e service nova] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Refreshing network info cache for port 7f77d04e-4464-4f53-b82b-bd1dca0bdfbe {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 652.218838] env[61852]: DEBUG nova.network.neutron [-] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 652.239993] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6b502c7-8dba-4787-8703-96d415eb7bf3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.248456] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ed2b054-b5cb-4820-aac4-e01143a24faa {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.286991] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24e83637-e2e7-4628-9868-adf655eba2b9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.295208] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8dfc7a4-f49a-465a-969b-892c6075419b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.309032] env[61852]: DEBUG nova.compute.provider_tree [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 652.362265] env[61852]: DEBUG nova.compute.manager [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 652.726073] env[61852]: DEBUG nova.network.neutron [-] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.738870] env[61852]: DEBUG nova.network.neutron [req-47e30968-9e4a-4a72-8716-bf395c46d9fa req-93e32afd-5628-4f10-9be6-8346a4f2c59e service nova] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 652.811863] env[61852]: DEBUG nova.scheduler.client.report [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 652.815513] env[61852]: DEBUG nova.network.neutron [req-47e30968-9e4a-4a72-8716-bf395c46d9fa req-93e32afd-5628-4f10-9be6-8346a4f2c59e service nova] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.039751] env[61852]: ERROR nova.compute.manager [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f6966f62-2f69-4005-9d7d-23badccf71d5, please check neutron logs for more information. [ 653.039751] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 653.039751] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 653.039751] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 653.039751] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 653.039751] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 653.039751] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 653.039751] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 653.039751] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 653.039751] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 653.039751] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 653.039751] env[61852]: ERROR nova.compute.manager raise self.value [ 653.039751] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 653.039751] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 653.039751] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 653.039751] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 653.040577] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 653.040577] env[61852]: ERROR nova.compute.manager raise 
exception.PortBindingFailed(port_id=port['id']) [ 653.040577] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f6966f62-2f69-4005-9d7d-23badccf71d5, please check neutron logs for more information. [ 653.040577] env[61852]: ERROR nova.compute.manager [ 653.040577] env[61852]: Traceback (most recent call last): [ 653.040577] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 653.040577] env[61852]: listener.cb(fileno) [ 653.040577] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 653.040577] env[61852]: result = function(*args, **kwargs) [ 653.040577] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 653.040577] env[61852]: return func(*args, **kwargs) [ 653.040577] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 653.040577] env[61852]: raise e [ 653.040577] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 653.040577] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 653.040577] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 653.040577] env[61852]: created_port_ids = self._update_ports_for_instance( [ 653.040577] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 653.040577] env[61852]: with excutils.save_and_reraise_exception(): [ 653.040577] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 653.040577] env[61852]: self.force_reraise() [ 653.040577] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 653.040577] env[61852]: raise self.value [ 653.040577] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 653.040577] env[61852]: updated_port = self._update_port( [ 653.040577] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 653.040577] env[61852]: _ensure_no_port_binding_failure(port) [ 653.040577] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 653.040577] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 653.041716] env[61852]: nova.exception.PortBindingFailed: Binding failed for port f6966f62-2f69-4005-9d7d-23badccf71d5, please check neutron logs for more information. [ 653.041716] env[61852]: Removing descriptor: 19 [ 653.232103] env[61852]: INFO nova.compute.manager [-] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Took 1.03 seconds to deallocate network for instance. 
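[editor's annotation] The "Waiting for function ... _deallocate_network_with_retries to return." entries and the "Took 1.03 seconds to deallocate network" line above show the deallocation being driven through an oslo.service looping-call retry wrapper (loopingcall.py:435). A rough sketch of that pattern, assuming oslo.service is installed; the retry bounds and the stand-in exception below are illustrative, not Nova's actual values:

from oslo_service import loopingcall

class TransientNeutronError(Exception):
    """Stand-in for the errors worth retrying (illustrative only)."""

# RetryDecorator re-invokes the wrapped function from a looping call
# until it returns or max_retry_count is exhausted.
@loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                            max_sleep_time=5,
                            exceptions=(TransientNeutronError,))
def _deallocate_network_with_retries():
    # In Nova this calls the manager's _deallocate_network(); stubbed here.
    print("deallocating network for instance")

_deallocate_network_with_retries()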
[ 653.233356] env[61852]: DEBUG nova.compute.claims [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 653.233832] env[61852]: DEBUG oslo_concurrency.lockutils [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.319634] env[61852]: DEBUG oslo_concurrency.lockutils [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.965s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 653.319634] env[61852]: ERROR nova.compute.manager [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 015caeea-bda3-436c-b559-371e857627aa, please check neutron logs for more information. [ 653.319634] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Traceback (most recent call last): [ 653.319634] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 653.319634] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] self.driver.spawn(context, instance, image_meta, [ 653.319634] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 653.319634] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] self._vmops.spawn(context, instance, image_meta, injected_files, [ 653.319634] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 653.319634] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] vm_ref = self.build_virtual_machine(instance, [ 653.319963] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 653.319963] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] vif_infos = vmwarevif.get_vif_info(self._session, [ 653.319963] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 653.319963] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] for vif in network_info: [ 653.319963] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File 
"/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 653.319963] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] return self._sync_wrapper(fn, *args, **kwargs) [ 653.319963] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 653.319963] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] self.wait() [ 653.319963] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 653.319963] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] self[:] = self._gt.wait() [ 653.319963] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 653.319963] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] return self._exit_event.wait() [ 653.319963] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 653.320321] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] result = hub.switch() [ 653.320321] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 653.320321] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] return self.greenlet.switch() [ 653.320321] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 653.320321] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] result = function(*args, **kwargs) [ 653.320321] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 653.320321] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] return func(*args, **kwargs) [ 653.320321] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 653.320321] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] raise e [ 653.320321] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 653.320321] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] nwinfo = self.network_api.allocate_for_instance( [ 653.320321] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 653.320321] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] created_port_ids = self._update_ports_for_instance( [ 653.320626] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/network/neutron.py", line 
1414, in _update_ports_for_instance [ 653.320626] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] with excutils.save_and_reraise_exception(): [ 653.320626] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 653.320626] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] self.force_reraise() [ 653.320626] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 653.320626] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] raise self.value [ 653.320626] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 653.320626] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] updated_port = self._update_port( [ 653.320626] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 653.320626] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] _ensure_no_port_binding_failure(port) [ 653.320626] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 653.320626] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] raise exception.PortBindingFailed(port_id=port['id']) [ 653.320897] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] nova.exception.PortBindingFailed: Binding failed for port 015caeea-bda3-436c-b559-371e857627aa, please check neutron logs for more information. [ 653.320897] env[61852]: ERROR nova.compute.manager [instance: 5d89c8de-69f9-432d-bb64-46d662097463] [ 653.320897] env[61852]: DEBUG nova.compute.utils [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Binding failed for port 015caeea-bda3-436c-b559-371e857627aa, please check neutron logs for more information. 
{{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 653.321954] env[61852]: DEBUG oslo_concurrency.lockutils [req-47e30968-9e4a-4a72-8716-bf395c46d9fa req-93e32afd-5628-4f10-9be6-8346a4f2c59e service nova] Releasing lock "refresh_cache-97c37446-5b86-469a-9b9b-751d0ebea463" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 653.323061] env[61852]: DEBUG nova.compute.manager [req-47e30968-9e4a-4a72-8716-bf395c46d9fa req-93e32afd-5628-4f10-9be6-8346a4f2c59e service nova] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Received event network-vif-deleted-7f77d04e-4464-4f53-b82b-bd1dca0bdfbe {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 653.323061] env[61852]: DEBUG nova.compute.manager [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Build of instance 5d89c8de-69f9-432d-bb64-46d662097463 was re-scheduled: Binding failed for port 015caeea-bda3-436c-b559-371e857627aa, please check neutron logs for more information. {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 653.323386] env[61852]: DEBUG nova.compute.manager [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 653.323472] env[61852]: DEBUG oslo_concurrency.lockutils [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Acquiring lock "refresh_cache-5d89c8de-69f9-432d-bb64-46d662097463" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 653.323674] env[61852]: DEBUG oslo_concurrency.lockutils [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Acquired lock "refresh_cache-5d89c8de-69f9-432d-bb64-46d662097463" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.323864] env[61852]: DEBUG nova.network.neutron [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 653.327594] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 31.400s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 653.372914] env[61852]: DEBUG nova.compute.manager [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] [instance: 
90251da7-072c-45ff-899b-3fd2e0c06880] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 653.396456] env[61852]: DEBUG nova.virt.hardware [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 653.396728] env[61852]: DEBUG nova.virt.hardware [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 653.396892] env[61852]: DEBUG nova.virt.hardware [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 653.397084] env[61852]: DEBUG nova.virt.hardware [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 653.397234] env[61852]: DEBUG nova.virt.hardware [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 653.397380] env[61852]: DEBUG nova.virt.hardware [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 653.397584] env[61852]: DEBUG nova.virt.hardware [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 653.397740] env[61852]: DEBUG nova.virt.hardware [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 653.397900] env[61852]: DEBUG nova.virt.hardware [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 653.398071] env[61852]: DEBUG nova.virt.hardware [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 653.398244] env[61852]: DEBUG nova.virt.hardware [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 653.399389] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c8a835e-7042-45d3-b52b-8592239fe9df {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.409546] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25fdefc1-3575-402a-92cd-5711d9cefa52 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.425891] env[61852]: ERROR nova.compute.manager [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f6966f62-2f69-4005-9d7d-23badccf71d5, please check neutron logs for more information. 
[ 653.425891] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Traceback (most recent call last): [ 653.425891] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 653.425891] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] yield resources [ 653.425891] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 653.425891] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] self.driver.spawn(context, instance, image_meta, [ 653.425891] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 653.425891] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] self._vmops.spawn(context, instance, image_meta, injected_files, [ 653.425891] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 653.425891] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] vm_ref = self.build_virtual_machine(instance, [ 653.425891] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 653.426289] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] vif_infos = vmwarevif.get_vif_info(self._session, [ 653.426289] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 653.426289] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] for vif in network_info: [ 653.426289] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 653.426289] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] return self._sync_wrapper(fn, *args, **kwargs) [ 653.426289] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 653.426289] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] self.wait() [ 653.426289] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 653.426289] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] self[:] = self._gt.wait() [ 653.426289] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 653.426289] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] return self._exit_event.wait() [ 653.426289] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 653.426289] env[61852]: ERROR 
nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] current.throw(*self._exc) [ 653.426625] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 653.426625] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] result = function(*args, **kwargs) [ 653.426625] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 653.426625] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] return func(*args, **kwargs) [ 653.426625] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 653.426625] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] raise e [ 653.426625] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 653.426625] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] nwinfo = self.network_api.allocate_for_instance( [ 653.426625] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 653.426625] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] created_port_ids = self._update_ports_for_instance( [ 653.426625] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 653.426625] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] with excutils.save_and_reraise_exception(): [ 653.426625] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 653.426968] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] self.force_reraise() [ 653.426968] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 653.426968] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] raise self.value [ 653.426968] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 653.426968] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] updated_port = self._update_port( [ 653.426968] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 653.426968] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] _ensure_no_port_binding_failure(port) [ 653.426968] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
653.426968] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] raise exception.PortBindingFailed(port_id=port['id']) [ 653.426968] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] nova.exception.PortBindingFailed: Binding failed for port f6966f62-2f69-4005-9d7d-23badccf71d5, please check neutron logs for more information. [ 653.426968] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] [ 653.426968] env[61852]: INFO nova.compute.manager [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Terminating instance [ 653.428315] env[61852]: DEBUG oslo_concurrency.lockutils [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Acquiring lock "refresh_cache-90251da7-072c-45ff-899b-3fd2e0c06880" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 653.428538] env[61852]: DEBUG oslo_concurrency.lockutils [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Acquired lock "refresh_cache-90251da7-072c-45ff-899b-3fd2e0c06880" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.428664] env[61852]: DEBUG nova.network.neutron [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 653.849396] env[61852]: DEBUG nova.network.neutron [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 653.942339] env[61852]: DEBUG nova.network.neutron [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.069464] env[61852]: DEBUG oslo_concurrency.lockutils [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Acquiring lock "e795b0f0-2c9c-4f44-9058-fbe706873d5a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.069746] env[61852]: DEBUG oslo_concurrency.lockutils [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Lock "e795b0f0-2c9c-4f44-9058-fbe706873d5a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.116284] env[61852]: DEBUG nova.network.neutron [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 654.185809] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55ad70b9-4954-4037-8831-b13aa9acc2d8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.193801] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29b034e5-1475-4f57-a9d1-07e0ad80ac28 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.224865] env[61852]: DEBUG nova.network.neutron [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.227036] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c2c1555-4028-4521-8433-4009e522f74e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.236791] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c84d37f-c74e-48b1-95a9-902b59f06a76 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.245028] env[61852]: DEBUG nova.compute.manager [req-557fc7bf-4bcb-4d12-aba8-6cfdaf51f3b5 req-6d1068ba-3940-486c-9c27-57cb265faec6 service nova] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Received event 
network-changed-f6966f62-2f69-4005-9d7d-23badccf71d5 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 654.245120] env[61852]: DEBUG nova.compute.manager [req-557fc7bf-4bcb-4d12-aba8-6cfdaf51f3b5 req-6d1068ba-3940-486c-9c27-57cb265faec6 service nova] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Refreshing instance network info cache due to event network-changed-f6966f62-2f69-4005-9d7d-23badccf71d5. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 654.245274] env[61852]: DEBUG oslo_concurrency.lockutils [req-557fc7bf-4bcb-4d12-aba8-6cfdaf51f3b5 req-6d1068ba-3940-486c-9c27-57cb265faec6 service nova] Acquiring lock "refresh_cache-90251da7-072c-45ff-899b-3fd2e0c06880" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 654.253699] env[61852]: DEBUG nova.compute.provider_tree [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 654.446862] env[61852]: DEBUG oslo_concurrency.lockutils [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Releasing lock "refresh_cache-5d89c8de-69f9-432d-bb64-46d662097463" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 654.447132] env[61852]: DEBUG nova.compute.manager [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 654.447323] env[61852]: DEBUG nova.compute.manager [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 654.447488] env[61852]: DEBUG nova.network.neutron [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 654.461489] env[61852]: DEBUG nova.network.neutron [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 654.731052] env[61852]: DEBUG oslo_concurrency.lockutils [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Releasing lock "refresh_cache-90251da7-072c-45ff-899b-3fd2e0c06880" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 654.731664] env[61852]: DEBUG nova.compute.manager [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 654.731889] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 654.732227] env[61852]: DEBUG oslo_concurrency.lockutils [req-557fc7bf-4bcb-4d12-aba8-6cfdaf51f3b5 req-6d1068ba-3940-486c-9c27-57cb265faec6 service nova] Acquired lock "refresh_cache-90251da7-072c-45ff-899b-3fd2e0c06880" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.732442] env[61852]: DEBUG nova.network.neutron [req-557fc7bf-4bcb-4d12-aba8-6cfdaf51f3b5 req-6d1068ba-3940-486c-9c27-57cb265faec6 service nova] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Refreshing network info cache for port f6966f62-2f69-4005-9d7d-23badccf71d5 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 654.733490] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-60243579-448b-4914-b43d-98375dca2f1e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.743478] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30be2bbe-5781-4389-aad4-7833ec3b8c95 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.756352] env[61852]: DEBUG nova.scheduler.client.report [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 654.767301] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 90251da7-072c-45ff-899b-3fd2e0c06880 could not be 
found. [ 654.768090] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 654.768090] env[61852]: INFO nova.compute.manager [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Took 0.04 seconds to destroy the instance on the hypervisor. [ 654.768090] env[61852]: DEBUG oslo.service.loopingcall [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 654.768229] env[61852]: DEBUG nova.compute.manager [-] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 654.768229] env[61852]: DEBUG nova.network.neutron [-] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 654.802333] env[61852]: DEBUG nova.network.neutron [-] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 654.963951] env[61852]: DEBUG nova.network.neutron [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 655.258012] env[61852]: DEBUG nova.network.neutron [req-557fc7bf-4bcb-4d12-aba8-6cfdaf51f3b5 req-6d1068ba-3940-486c-9c27-57cb265faec6 service nova] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 655.260789] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.936s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 655.261470] env[61852]: ERROR nova.compute.manager [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 440fe0cd-e03c-4dec-937f-cef09e489747, please check neutron logs for more information. 
[ 655.261470] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Traceback (most recent call last): [ 655.261470] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 655.261470] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] self.driver.spawn(context, instance, image_meta, [ 655.261470] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 655.261470] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 655.261470] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 655.261470] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] vm_ref = self.build_virtual_machine(instance, [ 655.261470] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 655.261470] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] vif_infos = vmwarevif.get_vif_info(self._session, [ 655.261470] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 655.261830] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] for vif in network_info: [ 655.261830] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 655.261830] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] return self._sync_wrapper(fn, *args, **kwargs) [ 655.261830] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 655.261830] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] self.wait() [ 655.261830] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 655.261830] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] self[:] = self._gt.wait() [ 655.261830] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 655.261830] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] return self._exit_event.wait() [ 655.261830] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 655.261830] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] result = hub.switch() [ 655.261830] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
655.261830] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] return self.greenlet.switch() [ 655.262263] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 655.262263] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] result = function(*args, **kwargs) [ 655.262263] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 655.262263] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] return func(*args, **kwargs) [ 655.262263] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 655.262263] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] raise e [ 655.262263] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 655.262263] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] nwinfo = self.network_api.allocate_for_instance( [ 655.262263] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 655.262263] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] created_port_ids = self._update_ports_for_instance( [ 655.262263] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 655.262263] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] with excutils.save_and_reraise_exception(): [ 655.262263] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 655.262645] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] self.force_reraise() [ 655.262645] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 655.262645] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] raise self.value [ 655.262645] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 655.262645] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] updated_port = self._update_port( [ 655.262645] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 655.262645] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] _ensure_no_port_binding_failure(port) [ 655.262645] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 655.262645] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] raise exception.PortBindingFailed(port_id=port['id']) [ 655.262645] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] nova.exception.PortBindingFailed: Binding failed for port 440fe0cd-e03c-4dec-937f-cef09e489747, please check neutron logs for more information. [ 655.262645] env[61852]: ERROR nova.compute.manager [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] [ 655.262963] env[61852]: DEBUG nova.compute.utils [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Binding failed for port 440fe0cd-e03c-4dec-937f-cef09e489747, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 655.263332] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.139s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.264753] env[61852]: INFO nova.compute.claims [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 655.267338] env[61852]: DEBUG nova.compute.manager [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Build of instance ab92661d-d5e3-4e7a-b6c3-48d48bf795b3 was re-scheduled: Binding failed for port 440fe0cd-e03c-4dec-937f-cef09e489747, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 655.267779] env[61852]: DEBUG nova.compute.manager [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 655.267999] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Acquiring lock "refresh_cache-ab92661d-d5e3-4e7a-b6c3-48d48bf795b3" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 655.268159] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Acquired lock "refresh_cache-ab92661d-d5e3-4e7a-b6c3-48d48bf795b3" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.268315] env[61852]: DEBUG nova.network.neutron [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 655.305738] env[61852]: DEBUG nova.network.neutron [-] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 655.362262] env[61852]: DEBUG nova.network.neutron [req-557fc7bf-4bcb-4d12-aba8-6cfdaf51f3b5 req-6d1068ba-3940-486c-9c27-57cb265faec6 service nova] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 655.467251] env[61852]: INFO nova.compute.manager [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] [instance: 5d89c8de-69f9-432d-bb64-46d662097463] Took 1.02 seconds to deallocate network for instance. [ 655.804777] env[61852]: DEBUG nova.network.neutron [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 655.808110] env[61852]: INFO nova.compute.manager [-] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Took 1.04 seconds to deallocate network for instance. 
[ 655.810337] env[61852]: DEBUG nova.compute.claims [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 655.810545] env[61852]: DEBUG oslo_concurrency.lockutils [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.864581] env[61852]: DEBUG oslo_concurrency.lockutils [req-557fc7bf-4bcb-4d12-aba8-6cfdaf51f3b5 req-6d1068ba-3940-486c-9c27-57cb265faec6 service nova] Releasing lock "refresh_cache-90251da7-072c-45ff-899b-3fd2e0c06880" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 655.864894] env[61852]: DEBUG nova.compute.manager [req-557fc7bf-4bcb-4d12-aba8-6cfdaf51f3b5 req-6d1068ba-3940-486c-9c27-57cb265faec6 service nova] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Received event network-vif-deleted-f6966f62-2f69-4005-9d7d-23badccf71d5 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 655.869515] env[61852]: DEBUG nova.network.neutron [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.372348] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Releasing lock "refresh_cache-ab92661d-d5e3-4e7a-b6c3-48d48bf795b3" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 656.372348] env[61852]: DEBUG nova.compute.manager [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 656.372669] env[61852]: DEBUG nova.compute.manager [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 656.372669] env[61852]: DEBUG nova.network.neutron [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 656.386276] env[61852]: DEBUG nova.network.neutron [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 656.493116] env[61852]: INFO nova.scheduler.client.report [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Deleted allocations for instance 5d89c8de-69f9-432d-bb64-46d662097463 [ 656.564340] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f26f4c50-ae94-42ab-bc6b-f043bb64638d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.572680] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9c0b3bd-19b1-4b75-9265-a43ecc32be87 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.603195] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4343e40-444c-48d7-a028-02e5dfe9c472 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.611586] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f1eb689-d8a6-4bde-8808-8aa30d3838ea {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.626877] env[61852]: DEBUG nova.compute.provider_tree [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 656.889735] env[61852]: DEBUG nova.network.neutron [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.003188] env[61852]: DEBUG oslo_concurrency.lockutils [None req-703af28a-bb8e-44e3-8f66-5b76dd125867 tempest-ServerMetadataNegativeTestJSON-1299248376 
tempest-ServerMetadataNegativeTestJSON-1299248376-project-member] Lock "5d89c8de-69f9-432d-bb64-46d662097463" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 106.642s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 657.129501] env[61852]: DEBUG nova.scheduler.client.report [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 657.392584] env[61852]: INFO nova.compute.manager [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] [instance: ab92661d-d5e3-4e7a-b6c3-48d48bf795b3] Took 1.02 seconds to deallocate network for instance. [ 657.506259] env[61852]: DEBUG nova.compute.manager [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 657.635494] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.372s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 657.636099] env[61852]: DEBUG nova.compute.manager [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 657.639455] env[61852]: DEBUG oslo_concurrency.lockutils [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 28.351s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 658.029287] env[61852]: DEBUG oslo_concurrency.lockutils [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 658.146490] env[61852]: DEBUG nova.compute.utils [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 658.151448] env[61852]: DEBUG nova.compute.manager [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 658.151866] env[61852]: DEBUG nova.network.neutron [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 658.214879] env[61852]: DEBUG nova.policy [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '38ef9094e67a42d193ac8bf04ed81f54', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e75cfa60e8a9488687aad0e2e9a39df5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 658.424247] env[61852]: INFO nova.scheduler.client.report [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Deleted allocations for instance ab92661d-d5e3-4e7a-b6c3-48d48bf795b3 [ 658.491795] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e322d22-27ad-4f69-94cc-89c381a2357c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.500271] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66224735-5e41-495c-933d-d8fce6373093 {{(pid=61852) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.542273] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-573b3ed9-7210-43b0-bc46-5b3d49d76d60 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.554119] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b874214c-0ee5-4371-877a-86988430a3a4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.567152] env[61852]: DEBUG nova.compute.provider_tree [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 658.567767] env[61852]: DEBUG nova.network.neutron [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Successfully created port: d60ae728-6058-4f11-b583-7270bc28e3f7 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 658.652463] env[61852]: DEBUG nova.compute.manager [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 658.938585] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e1e4338-d478-424a-a2ab-7f1e7b3259b1 tempest-ServerExternalEventsTest-391357440 tempest-ServerExternalEventsTest-391357440-project-member] Lock "ab92661d-d5e3-4e7a-b6c3-48d48bf795b3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 108.093s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 659.073075] env[61852]: DEBUG nova.scheduler.client.report [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 659.442996] env[61852]: DEBUG nova.compute.manager [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 659.570057] env[61852]: DEBUG nova.compute.manager [req-d8d0500c-41ea-4b98-a108-454e213ed558 req-30209c53-236f-4b36-91a0-c0a00517dab0 service nova] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Received event network-changed-d60ae728-6058-4f11-b583-7270bc28e3f7 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 659.570246] env[61852]: DEBUG nova.compute.manager [req-d8d0500c-41ea-4b98-a108-454e213ed558 req-30209c53-236f-4b36-91a0-c0a00517dab0 service nova] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Refreshing instance network info cache due to event network-changed-d60ae728-6058-4f11-b583-7270bc28e3f7. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 659.570428] env[61852]: DEBUG oslo_concurrency.lockutils [req-d8d0500c-41ea-4b98-a108-454e213ed558 req-30209c53-236f-4b36-91a0-c0a00517dab0 service nova] Acquiring lock "refresh_cache-394a7258-a9e0-4b16-a125-01e8cdfe7026" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 659.570606] env[61852]: DEBUG oslo_concurrency.lockutils [req-d8d0500c-41ea-4b98-a108-454e213ed558 req-30209c53-236f-4b36-91a0-c0a00517dab0 service nova] Acquired lock "refresh_cache-394a7258-a9e0-4b16-a125-01e8cdfe7026" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.570781] env[61852]: DEBUG nova.network.neutron [req-d8d0500c-41ea-4b98-a108-454e213ed558 req-30209c53-236f-4b36-91a0-c0a00517dab0 service nova] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Refreshing network info cache for port d60ae728-6058-4f11-b583-7270bc28e3f7 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 659.578307] env[61852]: DEBUG oslo_concurrency.lockutils [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.939s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 659.578855] env[61852]: ERROR nova.compute.manager [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 78f5a5c3-21b6-4553-b82b-c2b4a5d5d59d, please check neutron logs for more information. 
[ 659.578855] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Traceback (most recent call last): [ 659.578855] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 659.578855] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] self.driver.spawn(context, instance, image_meta, [ 659.578855] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 659.578855] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 659.578855] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 659.578855] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] vm_ref = self.build_virtual_machine(instance, [ 659.578855] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 659.578855] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] vif_infos = vmwarevif.get_vif_info(self._session, [ 659.578855] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 659.579135] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] for vif in network_info: [ 659.579135] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 659.579135] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] return self._sync_wrapper(fn, *args, **kwargs) [ 659.579135] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 659.579135] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] self.wait() [ 659.579135] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 659.579135] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] self[:] = self._gt.wait() [ 659.579135] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 659.579135] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] return self._exit_event.wait() [ 659.579135] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 659.579135] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] current.throw(*self._exc) [ 659.579135] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
659.579135] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] result = function(*args, **kwargs) [ 659.579445] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 659.579445] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] return func(*args, **kwargs) [ 659.579445] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 659.579445] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] raise e [ 659.579445] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 659.579445] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] nwinfo = self.network_api.allocate_for_instance( [ 659.579445] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 659.579445] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] created_port_ids = self._update_ports_for_instance( [ 659.579445] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 659.579445] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] with excutils.save_and_reraise_exception(): [ 659.579445] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 659.579445] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] self.force_reraise() [ 659.579445] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 659.579739] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] raise self.value [ 659.579739] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 659.579739] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] updated_port = self._update_port( [ 659.579739] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 659.579739] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] _ensure_no_port_binding_failure(port) [ 659.579739] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 659.579739] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] raise exception.PortBindingFailed(port_id=port['id']) [ 659.579739] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] nova.exception.PortBindingFailed: Binding failed for 
port 78f5a5c3-21b6-4553-b82b-c2b4a5d5d59d, please check neutron logs for more information. [ 659.579739] env[61852]: ERROR nova.compute.manager [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] [ 659.579739] env[61852]: DEBUG nova.compute.utils [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Binding failed for port 78f5a5c3-21b6-4553-b82b-c2b4a5d5d59d, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 659.581132] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.334s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.582742] env[61852]: INFO nova.compute.claims [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 659.585998] env[61852]: DEBUG nova.compute.manager [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Build of instance 23f221fd-8f76-4a6f-8189-49d9be9da7e2 was re-scheduled: Binding failed for port 78f5a5c3-21b6-4553-b82b-c2b4a5d5d59d, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 659.586985] env[61852]: DEBUG nova.compute.manager [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 659.586985] env[61852]: DEBUG oslo_concurrency.lockutils [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Acquiring lock "refresh_cache-23f221fd-8f76-4a6f-8189-49d9be9da7e2" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 659.586985] env[61852]: DEBUG oslo_concurrency.lockutils [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Acquired lock "refresh_cache-23f221fd-8f76-4a6f-8189-49d9be9da7e2" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.586985] env[61852]: DEBUG nova.network.neutron [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 659.665981] env[61852]: DEBUG nova.compute.manager [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 659.694990] env[61852]: DEBUG nova.virt.hardware [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=<?>,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-15T17:18:24Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 659.695247] env[61852]: DEBUG nova.virt.hardware [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 659.695462] env[61852]: DEBUG nova.virt.hardware [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 659.695642] env[61852]: DEBUG nova.virt.hardware [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 659.695789] env[61852]: DEBUG nova.virt.hardware [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 659.695936] env[61852]: DEBUG nova.virt.hardware [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 659.696154] env[61852]: DEBUG nova.virt.hardware [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 659.696319] env[61852]: DEBUG nova.virt.hardware [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies
/opt/stack/nova/nova/virt/hardware.py:471}} [ 659.696495] env[61852]: DEBUG nova.virt.hardware [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 659.696706] env[61852]: DEBUG nova.virt.hardware [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 659.696825] env[61852]: DEBUG nova.virt.hardware [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 659.697727] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1605535f-41e5-4c9a-90f3-2504aa81df6e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.707214] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ee3da24-1ccb-46c1-81bc-f04cd4c8448e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.970036] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.993538] env[61852]: ERROR nova.compute.manager [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d60ae728-6058-4f11-b583-7270bc28e3f7, please check neutron logs for more information. 
[ 659.993538] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 659.993538] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 659.993538] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 659.993538] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 659.993538] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 659.993538] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 659.993538] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 659.993538] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 659.993538] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 659.993538] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 659.993538] env[61852]: ERROR nova.compute.manager raise self.value [ 659.993538] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 659.993538] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 659.993538] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 659.993538] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 659.994078] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 659.994078] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 659.994078] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d60ae728-6058-4f11-b583-7270bc28e3f7, please check neutron logs for more information. 
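The traceback above bottoms out in _ensure_no_port_binding_failure (the nova/network/neutron.py:294 frame): Neutron accepted the port update but reported that it could not bind the port to the host, and Nova converts that report into PortBindingFailed. Below is a minimal self-contained sketch of that check, assuming the standard Neutron convention that an unbindable port comes back with binding:vif_type set to 'binding_failed'; the class and function mirror the names in the traceback and are not Nova's source verbatim.

    # Minimal sketch, not Nova's code: models the check at the bottom of
    # the traceback above, under the assumption that Neutron marks an
    # unbindable port with binding:vif_type = "binding_failed".

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, please check neutron "
                "logs for more information.")

    def _ensure_no_port_binding_failure(port):
        # Raise if Neutron reported that it could not bind the port.
        if port.get("binding:vif_type") == "binding_failed":
            raise PortBindingFailed(port_id=port["id"])

    # The port from this log would trip the check:
    try:
        _ensure_no_port_binding_failure({
            "id": "d60ae728-6058-4f11-b583-7270bc28e3f7",
            "binding:vif_type": "binding_failed",
        })
    except PortBindingFailed as exc:
        print(exc)

The same code path explains why the identical message repeats for ports 78f5a5c3-... and 0c0dcdbf-... elsewhere in this log: each instance fails in the same way during _allocate_network_async.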
[ 659.994078] env[61852]: ERROR nova.compute.manager [ 659.994078] env[61852]: Traceback (most recent call last): [ 659.994078] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 659.994078] env[61852]: listener.cb(fileno) [ 659.994078] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 659.994078] env[61852]: result = function(*args, **kwargs) [ 659.994078] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 659.994078] env[61852]: return func(*args, **kwargs) [ 659.994078] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 659.994078] env[61852]: raise e [ 659.994078] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 659.994078] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 659.994078] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 659.994078] env[61852]: created_port_ids = self._update_ports_for_instance( [ 659.994078] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 659.994078] env[61852]: with excutils.save_and_reraise_exception(): [ 659.994078] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 659.994078] env[61852]: self.force_reraise() [ 659.994078] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 659.994078] env[61852]: raise self.value [ 659.994078] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 659.994078] env[61852]: updated_port = self._update_port( [ 659.994078] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 659.994078] env[61852]: _ensure_no_port_binding_failure(port) [ 659.994078] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 659.994078] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 659.994730] env[61852]: nova.exception.PortBindingFailed: Binding failed for port d60ae728-6058-4f11-b583-7270bc28e3f7, please check neutron logs for more information. [ 659.994730] env[61852]: Removing descriptor: 18 [ 659.994730] env[61852]: ERROR nova.compute.manager [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d60ae728-6058-4f11-b583-7270bc28e3f7, please check neutron logs for more information. 
[ 659.994730] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Traceback (most recent call last): [ 659.994730] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 659.994730] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] yield resources [ 659.994730] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 659.994730] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] self.driver.spawn(context, instance, image_meta, [ 659.994730] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 659.994730] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] self._vmops.spawn(context, instance, image_meta, injected_files, [ 659.994730] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 659.994730] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] vm_ref = self.build_virtual_machine(instance, [ 659.995020] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 659.995020] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] vif_infos = vmwarevif.get_vif_info(self._session, [ 659.995020] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 659.995020] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] for vif in network_info: [ 659.995020] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 659.995020] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] return self._sync_wrapper(fn, *args, **kwargs) [ 659.995020] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 659.995020] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] self.wait() [ 659.995020] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 659.995020] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] self[:] = self._gt.wait() [ 659.995020] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 659.995020] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] return self._exit_event.wait() [ 659.995020] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 659.995509] env[61852]: ERROR 
nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] result = hub.switch() [ 659.995509] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 659.995509] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] return self.greenlet.switch() [ 659.995509] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 659.995509] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] result = function(*args, **kwargs) [ 659.995509] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 659.995509] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] return func(*args, **kwargs) [ 659.995509] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 659.995509] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] raise e [ 659.995509] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 659.995509] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] nwinfo = self.network_api.allocate_for_instance( [ 659.995509] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 659.995509] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] created_port_ids = self._update_ports_for_instance( [ 659.995809] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 659.995809] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] with excutils.save_and_reraise_exception(): [ 659.995809] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 659.995809] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] self.force_reraise() [ 659.995809] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 659.995809] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] raise self.value [ 659.995809] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 659.995809] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] updated_port = self._update_port( [ 659.995809] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 659.995809] 
env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] _ensure_no_port_binding_failure(port) [ 659.995809] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 659.995809] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] raise exception.PortBindingFailed(port_id=port['id']) [ 659.996134] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] nova.exception.PortBindingFailed: Binding failed for port d60ae728-6058-4f11-b583-7270bc28e3f7, please check neutron logs for more information. [ 659.996134] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] [ 659.996134] env[61852]: INFO nova.compute.manager [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Terminating instance [ 659.997895] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Acquiring lock "refresh_cache-394a7258-a9e0-4b16-a125-01e8cdfe7026" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 660.097822] env[61852]: DEBUG nova.network.neutron [req-d8d0500c-41ea-4b98-a108-454e213ed558 req-30209c53-236f-4b36-91a0-c0a00517dab0 service nova] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 660.112319] env[61852]: DEBUG nova.network.neutron [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 660.119264] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Acquiring lock "be44214d-72dc-4517-a91a-7f659b5aa897" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 660.119562] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Lock "be44214d-72dc-4517-a91a-7f659b5aa897" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 660.194257] env[61852]: DEBUG nova.network.neutron [req-d8d0500c-41ea-4b98-a108-454e213ed558 req-30209c53-236f-4b36-91a0-c0a00517dab0 service nova] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.214962] env[61852]: DEBUG nova.network.neutron [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.697830] env[61852]: DEBUG oslo_concurrency.lockutils [req-d8d0500c-41ea-4b98-a108-454e213ed558 req-30209c53-236f-4b36-91a0-c0a00517dab0 service nova] Releasing lock "refresh_cache-394a7258-a9e0-4b16-a125-01e8cdfe7026" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 660.698118] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Acquired lock "refresh_cache-394a7258-a9e0-4b16-a125-01e8cdfe7026" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.698270] env[61852]: DEBUG nova.network.neutron [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 660.718172] env[61852]: DEBUG oslo_concurrency.lockutils [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Releasing lock "refresh_cache-23f221fd-8f76-4a6f-8189-49d9be9da7e2" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 660.718172] env[61852]: DEBUG nova.compute.manager [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be
unplugged. {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 660.718172] env[61852]: DEBUG nova.compute.manager [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 660.718351] env[61852]: DEBUG nova.network.neutron [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 660.734595] env[61852]: DEBUG nova.network.neutron [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 660.899825] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96895bbd-d99b-4712-8ca1-288b9c27cb48 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.908455] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2305f0d2-67e9-442e-874c-259e7212ea6f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.939679] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65faa68b-3b31-404a-a82c-f4eb312b1bc4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.947554] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d0dbe57-3c27-4f08-a60b-649647bd7f9a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.961167] env[61852]: DEBUG nova.compute.provider_tree [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 661.220804] env[61852]: DEBUG nova.network.neutron [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 661.239613] env[61852]: DEBUG nova.network.neutron [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.311779] env[61852]: DEBUG nova.network.neutron [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.467517] env[61852]: DEBUG nova.scheduler.client.report [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 661.595540] env[61852]: DEBUG nova.compute.manager [req-544f1430-5aa8-4dd8-9b40-5b52ca536698 req-fac76a81-b42e-4e4c-953a-224cbe2ebb0b service nova] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Received event network-vif-deleted-d60ae728-6058-4f11-b583-7270bc28e3f7 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 661.746100] env[61852]: INFO nova.compute.manager [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: 23f221fd-8f76-4a6f-8189-49d9be9da7e2] Took 1.03 seconds to deallocate network for instance. [ 661.818027] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Releasing lock "refresh_cache-394a7258-a9e0-4b16-a125-01e8cdfe7026" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 661.818471] env[61852]: DEBUG nova.compute.manager [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 661.818664] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 661.818954] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-17000146-d15f-4064-a465-a30ddaa586d1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.828465] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1148bb31-1f80-4a94-9e0a-5510521f7eb0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.851854] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 394a7258-a9e0-4b16-a125-01e8cdfe7026 could not be found. [ 661.852137] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 661.852341] env[61852]: INFO nova.compute.manager [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Took 0.03 seconds to destroy the instance on the hypervisor. [ 661.852586] env[61852]: DEBUG oslo.service.loopingcall [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 661.852792] env[61852]: DEBUG nova.compute.manager [-] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 661.852885] env[61852]: DEBUG nova.network.neutron [-] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 661.873659] env[61852]: DEBUG nova.network.neutron [-] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Instance cache missing network info.
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 661.973711] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.393s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 661.974372] env[61852]: DEBUG nova.compute.manager [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 661.977404] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.212s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 661.979008] env[61852]: INFO nova.compute.claims [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 662.379097] env[61852]: DEBUG nova.network.neutron [-] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.485162] env[61852]: DEBUG nova.compute.utils [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 662.487118] env[61852]: DEBUG nova.compute.manager [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 662.487566] env[61852]: DEBUG nova.network.neutron [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 662.539635] env[61852]: DEBUG nova.policy [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5db98c1126cc41b5930b2e5fa823c330', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '783bc6968c91488293479f10b8dc92c1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 662.781223] env[61852]: INFO nova.scheduler.client.report [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Deleted allocations for instance 23f221fd-8f76-4a6f-8189-49d9be9da7e2 [ 662.816062] env[61852]: DEBUG nova.network.neutron [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Successfully created port: 0c0dcdbf-4adf-4f9a-b47b-e0f74c024906 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 662.885915] env[61852]: INFO nova.compute.manager [-] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Took 1.03 seconds to deallocate network for instance. [ 662.887841] env[61852]: DEBUG nova.compute.claims [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 662.887841] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 662.990912] env[61852]: DEBUG nova.compute.manager [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 663.290635] env[61852]: DEBUG oslo_concurrency.lockutils [None req-08a93566-4534-49d3-8a67-981e8897cca2 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Lock "23f221fd-8f76-4a6f-8189-49d9be9da7e2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 111.529s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 663.402983] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-520e0b0a-6f46-496e-90b0-5f8da892027f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.411442] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1b53896-5d53-452a-baff-b1dd9cefe634 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.446618] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad88889-fe45-4c83-be03-c82e921abaf4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.456063] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7436d184-48ac-422d-b91e-6a9ac8cc18d3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.471320] env[61852]: DEBUG nova.compute.provider_tree [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 663.793626] env[61852]: DEBUG nova.compute.manager [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 663.978291] env[61852]: DEBUG nova.scheduler.client.report [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 664.006509] env[61852]: DEBUG nova.compute.manager [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Start spawning the instance on the hypervisor.
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 664.039357] env[61852]: DEBUG nova.virt.hardware [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=<?>,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-15T17:18:24Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 664.041365] env[61852]: DEBUG nova.virt.hardware [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 664.041365] env[61852]: DEBUG nova.virt.hardware [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 664.041365] env[61852]: DEBUG nova.virt.hardware [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 664.041365] env[61852]: DEBUG nova.virt.hardware [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 664.041365] env[61852]: DEBUG nova.virt.hardware [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 664.041365] env[61852]: DEBUG nova.virt.hardware [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 664.041717] env[61852]: DEBUG nova.virt.hardware [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 664.041717] env[61852]: DEBUG nova.virt.hardware [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Got 1 possible
topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 664.041717] env[61852]: DEBUG nova.virt.hardware [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 664.042819] env[61852]: DEBUG nova.virt.hardware [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 664.043565] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a63c4e73-f521-4050-85ba-dd1814ecab35 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.054775] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e921a8-807b-47a2-8455-cd88163ebc5d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.077752] env[61852]: DEBUG nova.compute.manager [req-e77bd34c-fa48-499c-97a4-56f6e1949e22 req-c559e6ea-c35c-4065-a86a-a5dbdb4e864f service nova] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Received event network-changed-0c0dcdbf-4adf-4f9a-b47b-e0f74c024906 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 664.078859] env[61852]: DEBUG nova.compute.manager [req-e77bd34c-fa48-499c-97a4-56f6e1949e22 req-c559e6ea-c35c-4065-a86a-a5dbdb4e864f service nova] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Refreshing instance network info cache due to event network-changed-0c0dcdbf-4adf-4f9a-b47b-e0f74c024906. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 664.078859] env[61852]: DEBUG oslo_concurrency.lockutils [req-e77bd34c-fa48-499c-97a4-56f6e1949e22 req-c559e6ea-c35c-4065-a86a-a5dbdb4e864f service nova] Acquiring lock "refresh_cache-fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 664.078859] env[61852]: DEBUG oslo_concurrency.lockutils [req-e77bd34c-fa48-499c-97a4-56f6e1949e22 req-c559e6ea-c35c-4065-a86a-a5dbdb4e864f service nova] Acquired lock "refresh_cache-fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.078859] env[61852]: DEBUG nova.network.neutron [req-e77bd34c-fa48-499c-97a4-56f6e1949e22 req-c559e6ea-c35c-4065-a86a-a5dbdb4e864f service nova] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Refreshing network info cache for port 0c0dcdbf-4adf-4f9a-b47b-e0f74c024906 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 664.319768] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 664.443151] env[61852]: ERROR nova.compute.manager [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 0c0dcdbf-4adf-4f9a-b47b-e0f74c024906, please check neutron logs for more information. 
[ 664.443151] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 664.443151] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 664.443151] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 664.443151] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 664.443151] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 664.443151] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 664.443151] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 664.443151] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 664.443151] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 664.443151] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 664.443151] env[61852]: ERROR nova.compute.manager raise self.value [ 664.443151] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 664.443151] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 664.443151] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 664.443151] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 664.443827] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 664.443827] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 664.443827] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 0c0dcdbf-4adf-4f9a-b47b-e0f74c024906, please check neutron logs for more information. 
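Each of these tracebacks also passes through oslo_utils.excutils.save_and_reraise_exception (the __exit__ and force_reraise frames above): the context manager is entered while the PortBindingFailed is in flight so that cleanup can run, and on exit it re-raises the saved exception, which is the 'raise self.value' frame. A short usage sketch follows, assuming oslo.utils is installed as it is in this venv; the _rollback_ports helper is hypothetical and only stands in for Nova's real cleanup in _update_ports_for_instance.

    # Usage sketch for oslo_utils.excutils.save_and_reraise_exception,
    # the context manager behind the __exit__/force_reraise frames above.
    # _rollback_ports is a hypothetical stand-in for Nova's real cleanup.
    from oslo_utils import excutils

    def _rollback_ports(port_ids):
        # Hypothetical cleanup: undo work done before the failure.
        for port_id in port_ids:
            print(f"rolling back port {port_id}")

    def update_ports(port_ids):
        try:
            raise RuntimeError("binding failed")  # stand-in for PortBindingFailed
        except RuntimeError:
            # The exception is saved on enter; the body runs cleanup;
            # __exit__ re-raises it (force_reraise -> raise self.value).
            with excutils.save_and_reraise_exception():
                _rollback_ports(port_ids)

    try:
        update_ports(["0c0dcdbf-4adf-4f9a-b47b-e0f74c024906"])
    except RuntimeError as exc:
        print(f"re-raised: {exc}")

The point of the pattern, visible in every stack here, is that cleanup never swallows the original error: the PortBindingFailed still propagates up to _build_and_run_instance, which then logs "Failed to build and run instance" and reschedules.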
[ 664.443827] env[61852]: ERROR nova.compute.manager
[ 664.443827] env[61852]: Traceback (most recent call last):
[ 664.443827] env[61852]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait
[ 664.443827] env[61852]:     listener.cb(fileno)
[ 664.443827] env[61852]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 664.443827] env[61852]:     result = function(*args, **kwargs)
[ 664.443827] env[61852]:   File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 664.443827] env[61852]:     return func(*args, **kwargs)
[ 664.443827] env[61852]:   File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 664.443827] env[61852]:     raise e
[ 664.443827] env[61852]:   File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 664.443827] env[61852]:     nwinfo = self.network_api.allocate_for_instance(
[ 664.443827] env[61852]:   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 664.443827] env[61852]:     created_port_ids = self._update_ports_for_instance(
[ 664.443827] env[61852]:   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 664.443827] env[61852]:     with excutils.save_and_reraise_exception():
[ 664.443827] env[61852]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 664.443827] env[61852]:     self.force_reraise()
[ 664.443827] env[61852]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 664.443827] env[61852]:     raise self.value
[ 664.443827] env[61852]:   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 664.443827] env[61852]:     updated_port = self._update_port(
[ 664.443827] env[61852]:   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 664.443827] env[61852]:     _ensure_no_port_binding_failure(port)
[ 664.443827] env[61852]:   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 664.443827] env[61852]:     raise exception.PortBindingFailed(port_id=port['id'])
[ 664.444752] env[61852]: nova.exception.PortBindingFailed: Binding failed for port 0c0dcdbf-4adf-4f9a-b47b-e0f74c024906, please check neutron logs for more information.
[ 664.444752] env[61852]: Removing descriptor: 19
[ 664.444752] env[61852]: ERROR nova.compute.manager [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 0c0dcdbf-4adf-4f9a-b47b-e0f74c024906, please check neutron logs for more information.
[ 664.444752] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Traceback (most recent call last):
[ 664.444752] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]   File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources
[ 664.444752] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]     yield resources
[ 664.444752] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]   File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 664.444752] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]     self.driver.spawn(context, instance, image_meta,
[ 664.444752] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn
[ 664.444752] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 664.444752] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 664.444752] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]     vm_ref = self.build_virtual_machine(instance,
[ 664.445268] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 664.445268] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]     vif_infos = vmwarevif.get_vif_info(self._session,
[ 664.445268] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]   File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 664.445268] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]     for vif in network_info:
[ 664.445268] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]   File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 664.445268] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]     return self._sync_wrapper(fn, *args, **kwargs)
[ 664.445268] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]   File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 664.445268] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]     self.wait()
[ 664.445268] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]   File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 664.445268] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]     self[:] = self._gt.wait()
[ 664.445268] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 664.445268] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]     return self._exit_event.wait()
[ 664.445268] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 664.445631] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]     result = hub.switch()
[ 664.445631] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 664.445631] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]     return self.greenlet.switch()
[ 664.445631] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 664.445631] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]     result = function(*args, **kwargs)
[ 664.445631] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]   File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 664.445631] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]     return func(*args, **kwargs)
[ 664.445631] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]   File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 664.445631] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]     raise e
[ 664.445631] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]   File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 664.445631] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]     nwinfo = self.network_api.allocate_for_instance(
[ 664.445631] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 664.445631] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]     created_port_ids = self._update_ports_for_instance(
[ 664.445980] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 664.445980] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]     with excutils.save_and_reraise_exception():
[ 664.445980] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 664.445980] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]     self.force_reraise()
[ 664.445980] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 664.445980] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]     raise self.value
[ 664.445980] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 664.445980] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]     updated_port = self._update_port(
[ 664.445980] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 664.445980] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]     _ensure_no_port_binding_failure(port)
[ 664.445980] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 664.445980] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]     raise exception.PortBindingFailed(port_id=port['id'])
[ 664.446333] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] nova.exception.PortBindingFailed: Binding failed for port 0c0dcdbf-4adf-4f9a-b47b-e0f74c024906, please check neutron logs for more information.
[ 664.446333] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79]
[ 664.446333] env[61852]: INFO nova.compute.manager [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Terminating instance
[ 664.447210] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "refresh_cache-fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 664.486754] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.509s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 664.487282] env[61852]: DEBUG nova.compute.manager [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 664.489965] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.826s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 664.491477] env[61852]: INFO nova.compute.claims [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 664.598799] env[61852]: DEBUG nova.network.neutron [req-e77bd34c-fa48-499c-97a4-56f6e1949e22 req-c559e6ea-c35c-4065-a86a-a5dbdb4e864f service nova] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 664.694466] env[61852]: DEBUG nova.network.neutron [req-e77bd34c-fa48-499c-97a4-56f6e1949e22 req-c559e6ea-c35c-4065-a86a-a5dbdb4e864f service nova] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 664.996872] env[61852]: DEBUG nova.compute.utils [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}}
[ 665.000881] env[61852]: DEBUG nova.compute.manager [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 665.001079] env[61852]: DEBUG nova.network.neutron [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 665.062350] env[61852]: DEBUG nova.policy [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd238bb978b0342eb8cff25a56610bfc6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '58c92320bd3a43f0826d38e811f928f0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}}
[ 665.198810] env[61852]: DEBUG oslo_concurrency.lockutils [req-e77bd34c-fa48-499c-97a4-56f6e1949e22 req-c559e6ea-c35c-4065-a86a-a5dbdb4e864f service nova] Releasing lock "refresh_cache-fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 665.198810] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquired lock "refresh_cache-fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 665.198810] env[61852]: DEBUG nova.network.neutron [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 665.477287] env[61852]: DEBUG nova.network.neutron [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Successfully created port: 7228a44c-d207-4fcd-9160-bd99a3dcbcad {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 665.501787] env[61852]: DEBUG nova.compute.manager [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 665.736657] env[61852]: DEBUG nova.network.neutron [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 665.900902] env[61852]: DEBUG nova.network.neutron [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 666.047366] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cbebd76-fdfc-4e2a-9651-f4fde65255eb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 666.057170] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a88f29-2154-446d-aa14-780e73bd77da {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 666.095138] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebe9c45e-0ec2-4d93-aa0f-4af94cd2c0b7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 666.104667] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a9bc4d3-5f2a-4366-ba96-a6067738941f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 666.121820] env[61852]: DEBUG nova.compute.provider_tree [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 666.384725] env[61852]: DEBUG nova.compute.manager [req-067e30ca-de61-4e78-9ae5-38c726b6c128 req-196475c0-9d51-4ba2-91eb-9b21859647cb service nova] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Received event network-vif-deleted-0c0dcdbf-4adf-4f9a-b47b-e0f74c024906 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 666.405872] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Releasing lock "refresh_cache-fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 666.406822] env[61852]: DEBUG nova.compute.manager [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 666.406822] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 666.406822] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-43b00275-38a0-4945-84e1-6fd4ab39f877 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 666.418139] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73eb5c9b-c8f6-4673-84f9-b470e428a52f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 666.444445] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79 could not be found.
[ 666.444671] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 666.444851] env[61852]: INFO nova.compute.manager [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 666.445111] env[61852]: DEBUG oslo.service.loopingcall [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 666.445544] env[61852]: DEBUG nova.compute.manager [-] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 666.445544] env[61852]: DEBUG nova.network.neutron [-] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 666.464995] env[61852]: DEBUG nova.network.neutron [-] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 666.524938] env[61852]: DEBUG nova.compute.manager [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 666.548212] env[61852]: DEBUG nova.virt.hardware [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 666.548452] env[61852]: DEBUG nova.virt.hardware [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 666.548608] env[61852]: DEBUG nova.virt.hardware [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 666.548787] env[61852]: DEBUG nova.virt.hardware [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 666.548928] env[61852]: DEBUG nova.virt.hardware [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 666.549081] env[61852]: DEBUG nova.virt.hardware [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 666.549272] env[61852]: DEBUG nova.virt.hardware [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 666.549432] env[61852]: DEBUG nova.virt.hardware [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 666.549591] env[61852]: DEBUG nova.virt.hardware [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 666.549747] env[61852]: DEBUG nova.virt.hardware [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 666.549914] env[61852]: DEBUG nova.virt.hardware [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 666.553480] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd44d7b3-fef9-4707-97b4-20112182fad0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 666.555393] env[61852]: ERROR nova.compute.manager [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7228a44c-d207-4fcd-9160-bd99a3dcbcad, please check neutron logs for more information.
[ 666.555393] env[61852]: ERROR nova.compute.manager Traceback (most recent call last):
[ 666.555393] env[61852]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 666.555393] env[61852]: ERROR nova.compute.manager     nwinfo = self.network_api.allocate_for_instance(
[ 666.555393] env[61852]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 666.555393] env[61852]: ERROR nova.compute.manager     created_port_ids = self._update_ports_for_instance(
[ 666.555393] env[61852]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 666.555393] env[61852]: ERROR nova.compute.manager     with excutils.save_and_reraise_exception():
[ 666.555393] env[61852]: ERROR nova.compute.manager   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 666.555393] env[61852]: ERROR nova.compute.manager     self.force_reraise()
[ 666.555393] env[61852]: ERROR nova.compute.manager   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 666.555393] env[61852]: ERROR nova.compute.manager     raise self.value
[ 666.555393] env[61852]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 666.555393] env[61852]: ERROR nova.compute.manager     updated_port = self._update_port(
[ 666.555393] env[61852]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 666.555393] env[61852]: ERROR nova.compute.manager     _ensure_no_port_binding_failure(port)
[ 666.556672] env[61852]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 666.556672] env[61852]: ERROR nova.compute.manager     raise exception.PortBindingFailed(port_id=port['id'])
[ 666.556672] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7228a44c-d207-4fcd-9160-bd99a3dcbcad, please check neutron logs for more information.
[ 666.556672] env[61852]: ERROR nova.compute.manager
[ 666.556672] env[61852]: Traceback (most recent call last):
[ 666.556672] env[61852]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait
[ 666.556672] env[61852]:     listener.cb(fileno)
[ 666.556672] env[61852]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 666.556672] env[61852]:     result = function(*args, **kwargs)
[ 666.556672] env[61852]:   File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 666.556672] env[61852]:     return func(*args, **kwargs)
[ 666.556672] env[61852]:   File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 666.556672] env[61852]:     raise e
[ 666.556672] env[61852]:   File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 666.556672] env[61852]:     nwinfo = self.network_api.allocate_for_instance(
[ 666.556672] env[61852]:   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 666.556672] env[61852]:     created_port_ids = self._update_ports_for_instance(
[ 666.556672] env[61852]:   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 666.556672] env[61852]:     with excutils.save_and_reraise_exception():
[ 666.556672] env[61852]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 666.556672] env[61852]:     self.force_reraise()
[ 666.556672] env[61852]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 666.556672] env[61852]:     raise self.value
[ 666.556672] env[61852]:   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 666.556672] env[61852]:     updated_port = self._update_port(
[ 666.556672] env[61852]:   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 666.556672] env[61852]:     _ensure_no_port_binding_failure(port)
[ 666.556672] env[61852]:   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 666.556672] env[61852]:     raise exception.PortBindingFailed(port_id=port['id'])
[ 666.557901] env[61852]: nova.exception.PortBindingFailed: Binding failed for port 7228a44c-d207-4fcd-9160-bd99a3dcbcad, please check neutron logs for more information.
[ 666.557901] env[61852]: Removing descriptor: 19
[ 666.559636] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2792a4a-2cb5-45b9-86c1-bbf6bf78c7ef {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 666.575831] env[61852]: ERROR nova.compute.manager [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7228a44c-d207-4fcd-9160-bd99a3dcbcad, please check neutron logs for more information.
[ 666.575831] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Traceback (most recent call last):
[ 666.575831] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]   File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources
[ 666.575831] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]     yield resources
[ 666.575831] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]   File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 666.575831] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]     self.driver.spawn(context, instance, image_meta,
[ 666.575831] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn
[ 666.575831] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 666.575831] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 666.575831] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]     vm_ref = self.build_virtual_machine(instance,
[ 666.576173] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 666.576173] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]     vif_infos = vmwarevif.get_vif_info(self._session,
[ 666.576173] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]   File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 666.576173] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]     for vif in network_info:
[ 666.576173] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]   File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 666.576173] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]     return self._sync_wrapper(fn, *args, **kwargs)
[ 666.576173] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]   File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 666.576173] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]     self.wait()
[ 666.576173] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]   File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 666.576173] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]     self[:] = self._gt.wait()
[ 666.576173] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 666.576173] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]     return self._exit_event.wait()
[ 666.576173] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait
[ 666.576173] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]     current.throw(*self._exc)
[ 666.576488] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 666.576488] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]     result = function(*args, **kwargs)
[ 666.576488] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]   File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 666.576488] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]     return func(*args, **kwargs)
[ 666.576488] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]   File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 666.576488] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]     raise e
[ 666.576488] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]   File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 666.576488] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]     nwinfo = self.network_api.allocate_for_instance(
[ 666.576488] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 666.576488] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]     created_port_ids = self._update_ports_for_instance(
[ 666.576488] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 666.576488] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]     with excutils.save_and_reraise_exception():
[ 666.576488] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 666.576858] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]     self.force_reraise()
[ 666.576858] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 666.576858] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]     raise self.value
[ 666.576858] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 666.576858] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]     updated_port = self._update_port(
[ 666.576858] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 666.576858] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]     _ensure_no_port_binding_failure(port)
[ 666.576858] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 666.576858] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]     raise exception.PortBindingFailed(port_id=port['id'])
[ 666.576858] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] nova.exception.PortBindingFailed: Binding failed for port 7228a44c-d207-4fcd-9160-bd99a3dcbcad, please check neutron logs for more information.
[ 666.576858] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc]
[ 666.577451] env[61852]: INFO nova.compute.manager [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Terminating instance
[ 666.579467] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Acquiring lock "refresh_cache-b566ea57-9b1a-4869-be7c-9ba579db25dc" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 666.579631] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Acquired lock "refresh_cache-b566ea57-9b1a-4869-be7c-9ba579db25dc" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 666.579794] env[61852]: DEBUG nova.network.neutron [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 666.625048] env[61852]: DEBUG nova.scheduler.client.report [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 666.760848] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Acquiring lock "d6a46605-aa45-4de3-80a8-cb73b9980669" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 666.761080] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Lock "d6a46605-aa45-4de3-80a8-cb73b9980669" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 666.970637] env[61852]: DEBUG nova.network.neutron [-] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 667.100364] env[61852]: DEBUG nova.network.neutron [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 667.129904] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.640s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 667.130754] env[61852]: DEBUG nova.compute.manager [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 667.136653] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.875s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 667.177464] env[61852]: DEBUG nova.network.neutron [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 667.274493] env[61852]: DEBUG oslo_concurrency.lockutils [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Acquiring lock "b0433331-f005-49e0-bd22-bc78f970e3cd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 667.274748] env[61852]: DEBUG oslo_concurrency.lockutils [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Lock "b0433331-f005-49e0-bd22-bc78f970e3cd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 667.473252] env[61852]: INFO nova.compute.manager [-] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Took 1.03 seconds to deallocate network for instance.
[ 667.475720] env[61852]: DEBUG nova.compute.claims [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 667.475899] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 667.642351] env[61852]: DEBUG nova.compute.utils [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}}
[ 667.644482] env[61852]: DEBUG nova.compute.manager [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 667.644715] env[61852]: DEBUG nova.network.neutron [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 667.681476] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Releasing lock "refresh_cache-b566ea57-9b1a-4869-be7c-9ba579db25dc" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 667.681855] env[61852]: DEBUG nova.compute.manager [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 667.685017] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 667.685017] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f7cf6d86-5bb0-43be-a245-5290481a2011 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 667.694694] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4ff72df-cc75-441b-a847-282bd1ceee04 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 667.707754] env[61852]: DEBUG nova.policy [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '28f40112ed67470caa856b3c62f5596b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd5dbdfddd9c4497dadfe7fb6c1e690fe', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}}
[ 667.723030] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b566ea57-9b1a-4869-be7c-9ba579db25dc could not be found.
[ 667.723030] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 667.723169] env[61852]: INFO nova.compute.manager [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 667.723384] env[61852]: DEBUG oslo.service.loopingcall [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 667.723610] env[61852]: DEBUG nova.compute.manager [-] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 667.723779] env[61852]: DEBUG nova.network.neutron [-] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 667.740583] env[61852]: DEBUG nova.network.neutron [-] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 667.920353] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Acquiring lock "d48cefda-0b05-4ec0-8c1d-bc25cd491faf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 667.920577] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Lock "d48cefda-0b05-4ec0-8c1d-bc25cd491faf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 668.002856] env[61852]: DEBUG nova.network.neutron [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Successfully created port: a8ac770f-e371-4335-b908-9d47067c7fd3 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 668.030244] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6ed6a0e-010e-420f-b920-5f46f0212eb7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 668.038086] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba9a9089-86cc-46bd-b06e-3446533aac65 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 668.066963] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-069da9a2-2111-42f9-8bf7-9640e95b8ff9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 668.073956] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be5c3e8d-64d9-45b2-80c3-9b30171bffcc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 668.088281] env[61852]: DEBUG nova.compute.provider_tree [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 668.153053] env[61852]: DEBUG nova.compute.manager [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 668.244114] env[61852]: DEBUG nova.network.neutron [-] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 668.419328] env[61852]: DEBUG nova.compute.manager [req-1b6301c6-594d-48e2-9b4a-eac05486e97d req-cec6f195-e8f1-4616-a45e-6723d43b10f2 service nova] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Received event network-changed-7228a44c-d207-4fcd-9160-bd99a3dcbcad {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 668.419328] env[61852]: DEBUG nova.compute.manager [req-1b6301c6-594d-48e2-9b4a-eac05486e97d req-cec6f195-e8f1-4616-a45e-6723d43b10f2 service nova] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Refreshing instance network info cache due to event network-changed-7228a44c-d207-4fcd-9160-bd99a3dcbcad. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 668.419328] env[61852]: DEBUG oslo_concurrency.lockutils [req-1b6301c6-594d-48e2-9b4a-eac05486e97d req-cec6f195-e8f1-4616-a45e-6723d43b10f2 service nova] Acquiring lock "refresh_cache-b566ea57-9b1a-4869-be7c-9ba579db25dc" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 668.419328] env[61852]: DEBUG oslo_concurrency.lockutils [req-1b6301c6-594d-48e2-9b4a-eac05486e97d req-cec6f195-e8f1-4616-a45e-6723d43b10f2 service nova] Acquired lock "refresh_cache-b566ea57-9b1a-4869-be7c-9ba579db25dc" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 668.419481] env[61852]: DEBUG nova.network.neutron [req-1b6301c6-594d-48e2-9b4a-eac05486e97d req-cec6f195-e8f1-4616-a45e-6723d43b10f2 service nova] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Refreshing network info cache for port 7228a44c-d207-4fcd-9160-bd99a3dcbcad {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 668.591600] env[61852]: DEBUG nova.scheduler.client.report [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 668.745619] env[61852]: INFO nova.compute.manager [-] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Took 1.02 seconds to deallocate network for instance.
[ 668.748877] env[61852]: DEBUG nova.compute.claims [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 668.749149] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 668.827842] env[61852]: ERROR nova.compute.manager [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a8ac770f-e371-4335-b908-9d47067c7fd3, please check neutron logs for more information. [ 668.827842] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 668.827842] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 668.827842] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 668.827842] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 668.827842] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 668.827842] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 668.827842] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 668.827842] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 668.827842] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 668.827842] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 668.827842] env[61852]: ERROR nova.compute.manager raise self.value [ 668.827842] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 668.827842] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 668.827842] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 668.827842] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 668.828484] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 668.828484] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 668.828484] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a8ac770f-e371-4335-b908-9d47067c7fd3, please check neutron logs for more information. 
[ 668.828484] env[61852]: ERROR nova.compute.manager [ 668.828484] env[61852]: Traceback (most recent call last): [ 668.828484] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 668.828484] env[61852]: listener.cb(fileno) [ 668.828484] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 668.828484] env[61852]: result = function(*args, **kwargs) [ 668.828484] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 668.828484] env[61852]: return func(*args, **kwargs) [ 668.828484] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 668.828484] env[61852]: raise e [ 668.828484] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 668.828484] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 668.828484] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 668.828484] env[61852]: created_port_ids = self._update_ports_for_instance( [ 668.828484] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 668.828484] env[61852]: with excutils.save_and_reraise_exception(): [ 668.828484] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 668.828484] env[61852]: self.force_reraise() [ 668.828484] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 668.828484] env[61852]: raise self.value [ 668.828484] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 668.828484] env[61852]: updated_port = self._update_port( [ 668.828484] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 668.828484] env[61852]: _ensure_no_port_binding_failure(port) [ 668.828484] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 668.828484] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 668.829628] env[61852]: nova.exception.PortBindingFailed: Binding failed for port a8ac770f-e371-4335-b908-9d47067c7fd3, please check neutron logs for more information. [ 668.829628] env[61852]: Removing descriptor: 19 [ 668.941370] env[61852]: DEBUG nova.network.neutron [req-1b6301c6-594d-48e2-9b4a-eac05486e97d req-cec6f195-e8f1-4616-a45e-6723d43b10f2 service nova] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 669.016878] env[61852]: DEBUG nova.network.neutron [req-1b6301c6-594d-48e2-9b4a-eac05486e97d req-cec6f195-e8f1-4616-a45e-6723d43b10f2 service nova] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.096019] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.962s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 669.096635] env[61852]: ERROR nova.compute.manager [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ab0e2e07-d739-4bc9-8bbb-a86553a47ada, please check neutron logs for more information. [ 669.096635] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Traceback (most recent call last): [ 669.096635] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 669.096635] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] self.driver.spawn(context, instance, image_meta, [ 669.096635] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 669.096635] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 669.096635] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 669.096635] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] vm_ref = self.build_virtual_machine(instance, [ 669.096635] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 669.096635] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] vif_infos = vmwarevif.get_vif_info(self._session, [ 669.096635] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 669.096969] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] for vif in network_info: [ 669.096969] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 669.096969] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] return self._sync_wrapper(fn, *args, **kwargs) [ 669.096969] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/network/model.py", 
line 603, in _sync_wrapper [ 669.096969] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] self.wait() [ 669.096969] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 669.096969] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] self[:] = self._gt.wait() [ 669.096969] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 669.096969] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] return self._exit_event.wait() [ 669.096969] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 669.096969] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] current.throw(*self._exc) [ 669.096969] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 669.096969] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] result = function(*args, **kwargs) [ 669.097397] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 669.097397] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] return func(*args, **kwargs) [ 669.097397] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 669.097397] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] raise e [ 669.097397] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 669.097397] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] nwinfo = self.network_api.allocate_for_instance( [ 669.097397] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 669.097397] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] created_port_ids = self._update_ports_for_instance( [ 669.097397] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 669.097397] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] with excutils.save_and_reraise_exception(): [ 669.097397] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 669.097397] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] self.force_reraise() [ 669.097397] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 669.097761] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] raise self.value [ 669.097761] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 669.097761] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] updated_port = self._update_port( [ 669.097761] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 669.097761] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] _ensure_no_port_binding_failure(port) [ 669.097761] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 669.097761] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] raise exception.PortBindingFailed(port_id=port['id']) [ 669.097761] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] nova.exception.PortBindingFailed: Binding failed for port ab0e2e07-d739-4bc9-8bbb-a86553a47ada, please check neutron logs for more information. [ 669.097761] env[61852]: ERROR nova.compute.manager [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] [ 669.097761] env[61852]: DEBUG nova.compute.utils [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Binding failed for port ab0e2e07-d739-4bc9-8bbb-a86553a47ada, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 669.098471] env[61852]: DEBUG oslo_concurrency.lockutils [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.865s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 669.101330] env[61852]: DEBUG nova.compute.manager [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Build of instance 26aba610-746f-4a3c-988c-bf5ffa44198f was re-scheduled: Binding failed for port ab0e2e07-d739-4bc9-8bbb-a86553a47ada, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 669.101733] env[61852]: DEBUG nova.compute.manager [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 669.101957] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "refresh_cache-26aba610-746f-4a3c-988c-bf5ffa44198f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 669.102117] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquired lock "refresh_cache-26aba610-746f-4a3c-988c-bf5ffa44198f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.102273] env[61852]: DEBUG nova.network.neutron [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 669.162186] env[61852]: DEBUG nova.compute.manager [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 669.186577] env[61852]: DEBUG nova.virt.hardware [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 669.186820] env[61852]: DEBUG nova.virt.hardware [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 669.186974] env[61852]: DEBUG nova.virt.hardware [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 669.187168] env[61852]: DEBUG nova.virt.hardware [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 669.187316] env[61852]: DEBUG nova.virt.hardware [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 669.187461] env[61852]: DEBUG nova.virt.hardware [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 669.187666] env[61852]: DEBUG nova.virt.hardware [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 669.187823] env[61852]: DEBUG nova.virt.hardware [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 669.187985] env[61852]: DEBUG nova.virt.hardware [None 
req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 669.188169] env[61852]: DEBUG nova.virt.hardware [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 669.188417] env[61852]: DEBUG nova.virt.hardware [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 669.189334] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9f8d4c6-4c8b-4b43-9860-8fb23a3c517f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.197343] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-573ce6e9-e035-42a6-bbe5-7912196b7257 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.210992] env[61852]: ERROR nova.compute.manager [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a8ac770f-e371-4335-b908-9d47067c7fd3, please check neutron logs for more information. 
[ 669.210992] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Traceback (most recent call last): [ 669.210992] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 669.210992] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] yield resources [ 669.210992] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 669.210992] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] self.driver.spawn(context, instance, image_meta, [ 669.210992] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 669.210992] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] self._vmops.spawn(context, instance, image_meta, injected_files, [ 669.210992] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 669.210992] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] vm_ref = self.build_virtual_machine(instance, [ 669.210992] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 669.211401] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] vif_infos = vmwarevif.get_vif_info(self._session, [ 669.211401] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 669.211401] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] for vif in network_info: [ 669.211401] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 669.211401] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] return self._sync_wrapper(fn, *args, **kwargs) [ 669.211401] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 669.211401] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] self.wait() [ 669.211401] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 669.211401] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] self[:] = self._gt.wait() [ 669.211401] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 669.211401] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] return self._exit_event.wait() [ 669.211401] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 669.211401] env[61852]: ERROR 
nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] current.throw(*self._exc) [ 669.211800] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 669.211800] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] result = function(*args, **kwargs) [ 669.211800] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 669.211800] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] return func(*args, **kwargs) [ 669.211800] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 669.211800] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] raise e [ 669.211800] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 669.211800] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] nwinfo = self.network_api.allocate_for_instance( [ 669.211800] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 669.211800] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] created_port_ids = self._update_ports_for_instance( [ 669.211800] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 669.211800] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] with excutils.save_and_reraise_exception(): [ 669.211800] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 669.212207] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] self.force_reraise() [ 669.212207] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 669.212207] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] raise self.value [ 669.212207] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 669.212207] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] updated_port = self._update_port( [ 669.212207] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 669.212207] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] _ensure_no_port_binding_failure(port) [ 669.212207] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
669.212207] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] raise exception.PortBindingFailed(port_id=port['id']) [ 669.212207] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] nova.exception.PortBindingFailed: Binding failed for port a8ac770f-e371-4335-b908-9d47067c7fd3, please check neutron logs for more information. [ 669.212207] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] [ 669.212207] env[61852]: INFO nova.compute.manager [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Terminating instance [ 669.213265] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Acquiring lock "refresh_cache-144d5486-d438-4bca-9b68-c414cc1f4659" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 669.213426] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Acquired lock "refresh_cache-144d5486-d438-4bca-9b68-c414cc1f4659" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.213588] env[61852]: DEBUG nova.network.neutron [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 669.520182] env[61852]: DEBUG oslo_concurrency.lockutils [req-1b6301c6-594d-48e2-9b4a-eac05486e97d req-cec6f195-e8f1-4616-a45e-6723d43b10f2 service nova] Releasing lock "refresh_cache-b566ea57-9b1a-4869-be7c-9ba579db25dc" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 669.520466] env[61852]: DEBUG nova.compute.manager [req-1b6301c6-594d-48e2-9b4a-eac05486e97d req-cec6f195-e8f1-4616-a45e-6723d43b10f2 service nova] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Received event network-vif-deleted-7228a44c-d207-4fcd-9160-bd99a3dcbcad {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 669.625256] env[61852]: DEBUG nova.network.neutron [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 669.710195] env[61852]: DEBUG nova.network.neutron [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.734191] env[61852]: DEBUG nova.network.neutron [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 669.806814] env[61852]: DEBUG nova.network.neutron [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.962210] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6434e06b-013a-4fbc-b9ff-92ac9317aed8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.969980] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-546db69c-d13c-430b-a533-384023c53ff6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.998696] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d04b109-44b6-4d0b-9f00-26140ae9da65 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.006032] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a7f8f4c-9d58-4e8b-bd67-a85b2a5fea3d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.018680] env[61852]: DEBUG nova.compute.provider_tree [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 670.213540] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Releasing lock "refresh_cache-26aba610-746f-4a3c-988c-bf5ffa44198f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 670.213825] env[61852]: DEBUG nova.compute.manager [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 670.214015] env[61852]: DEBUG nova.compute.manager [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 670.214199] env[61852]: DEBUG nova.network.neutron [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 670.230828] env[61852]: DEBUG nova.network.neutron [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 670.310876] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Releasing lock "refresh_cache-144d5486-d438-4bca-9b68-c414cc1f4659" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 670.311340] env[61852]: DEBUG nova.compute.manager [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 670.311529] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 670.311814] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a333498c-d385-4c9c-b13e-7eb42d741e74 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.320393] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c526b592-5980-4c40-bdad-21329f2b3312 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.341946] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 144d5486-d438-4bca-9b68-c414cc1f4659 could not be found. 
[ 670.342191] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 670.342372] env[61852]: INFO nova.compute.manager [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Took 0.03 seconds to destroy the instance on the hypervisor. [ 670.342599] env[61852]: DEBUG oslo.service.loopingcall [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 670.342799] env[61852]: DEBUG nova.compute.manager [-] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 670.342891] env[61852]: DEBUG nova.network.neutron [-] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 670.356939] env[61852]: DEBUG nova.network.neutron [-] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 670.445732] env[61852]: DEBUG nova.compute.manager [req-aea3e2c7-c870-4fa4-bdef-5588dd10b0d2 req-c8cf2e9d-c869-4c6c-b0a5-6218a4996ceb service nova] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Received event network-changed-a8ac770f-e371-4335-b908-9d47067c7fd3 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 670.445924] env[61852]: DEBUG nova.compute.manager [req-aea3e2c7-c870-4fa4-bdef-5588dd10b0d2 req-c8cf2e9d-c869-4c6c-b0a5-6218a4996ceb service nova] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Refreshing instance network info cache due to event network-changed-a8ac770f-e371-4335-b908-9d47067c7fd3. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 670.446153] env[61852]: DEBUG oslo_concurrency.lockutils [req-aea3e2c7-c870-4fa4-bdef-5588dd10b0d2 req-c8cf2e9d-c869-4c6c-b0a5-6218a4996ceb service nova] Acquiring lock "refresh_cache-144d5486-d438-4bca-9b68-c414cc1f4659" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 670.446295] env[61852]: DEBUG oslo_concurrency.lockutils [req-aea3e2c7-c870-4fa4-bdef-5588dd10b0d2 req-c8cf2e9d-c869-4c6c-b0a5-6218a4996ceb service nova] Acquired lock "refresh_cache-144d5486-d438-4bca-9b68-c414cc1f4659" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.446451] env[61852]: DEBUG nova.network.neutron [req-aea3e2c7-c870-4fa4-bdef-5588dd10b0d2 req-c8cf2e9d-c869-4c6c-b0a5-6218a4996ceb service nova] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Refreshing network info cache for port a8ac770f-e371-4335-b908-9d47067c7fd3 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 670.523499] env[61852]: DEBUG nova.scheduler.client.report [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 670.734132] env[61852]: DEBUG nova.network.neutron [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.859192] env[61852]: DEBUG nova.network.neutron [-] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.963497] env[61852]: DEBUG nova.network.neutron [req-aea3e2c7-c870-4fa4-bdef-5588dd10b0d2 req-c8cf2e9d-c869-4c6c-b0a5-6218a4996ceb service nova] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 671.026492] env[61852]: DEBUG oslo_concurrency.lockutils [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.928s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 671.027132] env[61852]: ERROR nova.compute.manager [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7f77d04e-4464-4f53-b82b-bd1dca0bdfbe, please check neutron logs for more information. [ 671.027132] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Traceback (most recent call last): [ 671.027132] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 671.027132] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] self.driver.spawn(context, instance, image_meta, [ 671.027132] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 671.027132] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] self._vmops.spawn(context, instance, image_meta, injected_files, [ 671.027132] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 671.027132] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] vm_ref = self.build_virtual_machine(instance, [ 671.027132] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 671.027132] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] vif_infos = vmwarevif.get_vif_info(self._session, [ 671.027132] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 671.027468] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] for vif in network_info: [ 671.027468] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 671.027468] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] return self._sync_wrapper(fn, *args, **kwargs) [ 671.027468] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 671.027468] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] self.wait() [ 671.027468] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 671.027468] env[61852]: ERROR 
nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] self[:] = self._gt.wait()
[ 671.027468] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 671.027468] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] return self._exit_event.wait()
[ 671.027468] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait
[ 671.027468] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] current.throw(*self._exc)
[ 671.027468] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 671.027468] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] result = function(*args, **kwargs)
[ 671.027788] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 671.027788] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] return func(*args, **kwargs)
[ 671.027788] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 671.027788] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] raise e
[ 671.027788] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 671.027788] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] nwinfo = self.network_api.allocate_for_instance(
[ 671.027788] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 671.027788] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] created_port_ids = self._update_ports_for_instance(
[ 671.027788] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 671.027788] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] with excutils.save_and_reraise_exception():
[ 671.027788] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 671.027788] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] self.force_reraise()
[ 671.027788] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 671.028113] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] raise self.value
[ 671.028113] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 671.028113] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] updated_port = self._update_port(
[ 671.028113] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 671.028113] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] _ensure_no_port_binding_failure(port)
[ 671.028113] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 671.028113] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] raise exception.PortBindingFailed(port_id=port['id'])
[ 671.028113] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] nova.exception.PortBindingFailed: Binding failed for port 7f77d04e-4464-4f53-b82b-bd1dca0bdfbe, please check neutron logs for more information.
[ 671.028113] env[61852]: ERROR nova.compute.manager [instance: 97c37446-5b86-469a-9b9b-751d0ebea463]
[ 671.028113] env[61852]: DEBUG nova.compute.utils [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Binding failed for port 7f77d04e-4464-4f53-b82b-bd1dca0bdfbe, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}}
[ 671.029209] env[61852]: DEBUG oslo_concurrency.lockutils [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.219s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 671.032547] env[61852]: DEBUG nova.compute.manager [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Build of instance 97c37446-5b86-469a-9b9b-751d0ebea463 was re-scheduled: Binding failed for port 7f77d04e-4464-4f53-b82b-bd1dca0bdfbe, please check neutron logs for more information. {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}}
[ 671.032927] env[61852]: DEBUG nova.compute.manager [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}}
[ 671.033155] env[61852]: DEBUG oslo_concurrency.lockutils [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Acquiring lock "refresh_cache-97c37446-5b86-469a-9b9b-751d0ebea463" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 671.033356] env[61852]: DEBUG oslo_concurrency.lockutils [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Acquired lock "refresh_cache-97c37446-5b86-469a-9b9b-751d0ebea463" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 671.033534] env[61852]: DEBUG nova.network.neutron [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 671.035589] env[61852]: DEBUG nova.network.neutron [req-aea3e2c7-c870-4fa4-bdef-5588dd10b0d2 req-c8cf2e9d-c869-4c6c-b0a5-6218a4996ceb service nova] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 671.237480] env[61852]: INFO nova.compute.manager [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 26aba610-746f-4a3c-988c-bf5ffa44198f] Took 1.02 seconds to deallocate network for instance.
[ 671.361819] env[61852]: INFO nova.compute.manager [-] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Took 1.02 seconds to deallocate network for instance.
[ 671.364123] env[61852]: DEBUG nova.compute.claims [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 671.364304] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 671.540239] env[61852]: DEBUG oslo_concurrency.lockutils [req-aea3e2c7-c870-4fa4-bdef-5588dd10b0d2 req-c8cf2e9d-c869-4c6c-b0a5-6218a4996ceb service nova] Releasing lock "refresh_cache-144d5486-d438-4bca-9b68-c414cc1f4659" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 671.540471] env[61852]: DEBUG nova.compute.manager [req-aea3e2c7-c870-4fa4-bdef-5588dd10b0d2 req-c8cf2e9d-c869-4c6c-b0a5-6218a4996ceb service nova] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Received event network-vif-deleted-a8ac770f-e371-4335-b908-9d47067c7fd3 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 671.555253] env[61852]: DEBUG nova.network.neutron [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 671.623617] env[61852]: DEBUG nova.network.neutron [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 671.847831] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29261b2d-86fd-41a4-9c0c-7f946461dc64 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 671.855235] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d892dbb-745a-463c-8b31-e215fcc9ea8e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 671.884613] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23452a09-205f-4a5e-91e5-b6cc0fe88682 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 671.891294] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3fb14c0-c312-4b43-9425-24d82a5fdbdc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 671.904489] env[61852]: DEBUG nova.compute.provider_tree [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 672.126030] env[61852]: DEBUG oslo_concurrency.lockutils [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Releasing lock "refresh_cache-97c37446-5b86-469a-9b9b-751d0ebea463" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 672.127037] env[61852]: DEBUG nova.compute.manager [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}}
[ 672.127037] env[61852]: DEBUG nova.compute.manager [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 672.127037] env[61852]: DEBUG nova.network.neutron [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 672.141456] env[61852]: DEBUG nova.network.neutron [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 672.271497] env[61852]: INFO nova.scheduler.client.report [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Deleted allocations for instance 26aba610-746f-4a3c-988c-bf5ffa44198f
[ 672.407436] env[61852]: DEBUG nova.scheduler.client.report [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 672.644388] env[61852]: DEBUG nova.network.neutron [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 672.784662] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2acaaf60-a02d-4527-b3a3-07e2091874f1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "26aba610-746f-4a3c-988c-bf5ffa44198f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 120.250s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 672.912439] env[61852]: DEBUG oslo_concurrency.lockutils [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.883s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 672.913691] env[61852]: ERROR nova.compute.manager [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f6966f62-2f69-4005-9d7d-23badccf71d5, please check neutron logs for more information.
[ 672.913691] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Traceback (most recent call last):
[ 672.913691] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 672.913691] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] self.driver.spawn(context, instance, image_meta,
[ 672.913691] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn
[ 672.913691] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 672.913691] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 672.913691] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] vm_ref = self.build_virtual_machine(instance,
[ 672.913691] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 672.913691] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] vif_infos = vmwarevif.get_vif_info(self._session,
[ 672.913691] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 672.914189] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] for vif in network_info:
[ 672.914189] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 672.914189] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] return self._sync_wrapper(fn, *args, **kwargs)
[ 672.914189] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 672.914189] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] self.wait()
[ 672.914189] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 672.914189] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] self[:] = self._gt.wait()
[ 672.914189] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 672.914189] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] return self._exit_event.wait()
[ 672.914189] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait
[ 672.914189] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] current.throw(*self._exc)
[ 672.914189] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 672.914189] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] result = function(*args, **kwargs)
[ 672.914619] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 672.914619] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] return func(*args, **kwargs)
[ 672.914619] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 672.914619] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] raise e
[ 672.914619] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 672.914619] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] nwinfo = self.network_api.allocate_for_instance(
[ 672.914619] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 672.914619] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] created_port_ids = self._update_ports_for_instance(
[ 672.914619] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 672.914619] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] with excutils.save_and_reraise_exception():
[ 672.914619] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 672.914619] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] self.force_reraise()
[ 672.914619] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 672.915089] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] raise self.value
[ 672.915089] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 672.915089] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] updated_port = self._update_port(
[ 672.915089] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 672.915089] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] _ensure_no_port_binding_failure(port)
[ 672.915089] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 672.915089] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] raise exception.PortBindingFailed(port_id=port['id'])
[ 672.915089] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] nova.exception.PortBindingFailed: Binding failed for port f6966f62-2f69-4005-9d7d-23badccf71d5, please check neutron logs for more information.
[ 672.915089] env[61852]: ERROR nova.compute.manager [instance: 90251da7-072c-45ff-899b-3fd2e0c06880]
[ 672.915089] env[61852]: DEBUG nova.compute.utils [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Binding failed for port f6966f62-2f69-4005-9d7d-23badccf71d5, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}}
[ 672.915399] env[61852]: DEBUG oslo_concurrency.lockutils [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.886s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 672.916338] env[61852]: INFO nova.compute.claims [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 672.920791] env[61852]: DEBUG nova.compute.manager [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Build of instance 90251da7-072c-45ff-899b-3fd2e0c06880 was re-scheduled: Binding failed for port f6966f62-2f69-4005-9d7d-23badccf71d5, please check neutron logs for more information. {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}}
[ 672.921257] env[61852]: DEBUG nova.compute.manager [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}}
[ 672.921486] env[61852]: DEBUG oslo_concurrency.lockutils [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Acquiring lock "refresh_cache-90251da7-072c-45ff-899b-3fd2e0c06880" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 672.921633] env[61852]: DEBUG oslo_concurrency.lockutils [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Acquired lock "refresh_cache-90251da7-072c-45ff-899b-3fd2e0c06880" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 672.921788] env[61852]: DEBUG nova.network.neutron [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 673.147689] env[61852]: INFO nova.compute.manager [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] [instance: 97c37446-5b86-469a-9b9b-751d0ebea463] Took 1.02 seconds to deallocate network for instance.
[ 673.286695] env[61852]: DEBUG nova.compute.manager [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 673.448574] env[61852]: DEBUG nova.network.neutron [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 673.562059] env[61852]: DEBUG nova.network.neutron [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 673.808602] env[61852]: DEBUG oslo_concurrency.lockutils [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 674.064973] env[61852]: DEBUG oslo_concurrency.lockutils [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Releasing lock "refresh_cache-90251da7-072c-45ff-899b-3fd2e0c06880" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 674.064973] env[61852]: DEBUG nova.compute.manager [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}}
[ 674.064973] env[61852]: DEBUG nova.compute.manager [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 674.064973] env[61852]: DEBUG nova.network.neutron [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 674.082836] env[61852]: DEBUG nova.network.neutron [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 674.177933] env[61852]: INFO nova.scheduler.client.report [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Deleted allocations for instance 97c37446-5b86-469a-9b9b-751d0ebea463
[ 674.362299] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8e38431-2f7a-4d0a-8615-5899b59d03fe {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 674.369550] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cf84fda-237e-4bc7-b953-34d69e2b1669 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 674.398707] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45441251-f823-4313-b601-152df287b1c5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 674.406010] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b801a687-420c-47bc-9483-a68971b09d18 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 674.420763] env[61852]: DEBUG nova.compute.provider_tree [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 674.586199] env[61852]: DEBUG nova.network.neutron [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 674.688026] env[61852]: DEBUG oslo_concurrency.lockutils [None req-be4c828f-639e-4f30-b67a-2714f489b6e7 tempest-ImagesOneServerTestJSON-1969281479 tempest-ImagesOneServerTestJSON-1969281479-project-member] Lock "97c37446-5b86-469a-9b9b-751d0ebea463" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 120.925s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 674.783128] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "d3922357-383f-4f7e-9c76-4eb688a092b9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 674.783557] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "d3922357-383f-4f7e-9c76-4eb688a092b9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 674.924605] env[61852]: DEBUG nova.scheduler.client.report [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 675.089214] env[61852]: INFO nova.compute.manager [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] [instance: 90251da7-072c-45ff-899b-3fd2e0c06880] Took 1.02 seconds to deallocate network for instance.
[ 675.192072] env[61852]: DEBUG nova.compute.manager [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 675.430535] env[61852]: DEBUG oslo_concurrency.lockutils [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.516s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 675.434111] env[61852]: DEBUG nova.compute.manager [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 675.436721] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.467s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 675.442241] env[61852]: INFO nova.compute.claims [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 675.720411] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 675.949308] env[61852]: DEBUG nova.compute.utils [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}}
[ 675.954129] env[61852]: DEBUG nova.compute.manager [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 675.954129] env[61852]: DEBUG nova.network.neutron [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 676.124461] env[61852]: INFO nova.scheduler.client.report [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Deleted allocations for instance 90251da7-072c-45ff-899b-3fd2e0c06880
[ 676.157104] env[61852]: DEBUG nova.policy [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab568141e9e246dc926f00e931dcd0fe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ded6f33f9ea94778969b762158c3deb9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}}
[ 676.453604] env[61852]: DEBUG nova.compute.manager [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 676.605157] env[61852]: DEBUG nova.network.neutron [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Successfully created port: e65885ca-7727-4b4f-b822-b30800674d2a {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 676.637950] env[61852]: DEBUG oslo_concurrency.lockutils [None req-05ea92fa-79ba-4bdf-b965-a91a85246a50 tempest-InstanceActionsTestJSON-854513460 tempest-InstanceActionsTestJSON-854513460-project-member] Lock "90251da7-072c-45ff-899b-3fd2e0c06880" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 121.450s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 676.818834] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d140e28-bb6f-4953-99d2-f088411dbc3f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 676.827738] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f6c87b4-4bca-4ccc-a4ea-0b23886bbf85 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 676.857630] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f0c7dc4-bab0-455e-8463-ccfc999ac801 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 676.865313] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-254965e2-34a5-4975-8f35-d148e7a49f93 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 676.878495] env[61852]: DEBUG nova.compute.provider_tree [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 677.144082] env[61852]: DEBUG nova.compute.manager [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 677.360229] env[61852]: DEBUG nova.compute.manager [req-753f4712-6b16-4185-9fc0-1387feb7c55d req-593b12c6-9cd2-4d4e-a644-3ee76d1a7ddb service nova] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Received event network-changed-e65885ca-7727-4b4f-b822-b30800674d2a {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 677.360572] env[61852]: DEBUG nova.compute.manager [req-753f4712-6b16-4185-9fc0-1387feb7c55d req-593b12c6-9cd2-4d4e-a644-3ee76d1a7ddb service nova] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Refreshing instance network info cache due to event network-changed-e65885ca-7727-4b4f-b822-b30800674d2a. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 677.361085] env[61852]: DEBUG oslo_concurrency.lockutils [req-753f4712-6b16-4185-9fc0-1387feb7c55d req-593b12c6-9cd2-4d4e-a644-3ee76d1a7ddb service nova] Acquiring lock "refresh_cache-0b213475-347e-42c9-aa16-0abd570d1a3e" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 677.361466] env[61852]: DEBUG oslo_concurrency.lockutils [req-753f4712-6b16-4185-9fc0-1387feb7c55d req-593b12c6-9cd2-4d4e-a644-3ee76d1a7ddb service nova] Acquired lock "refresh_cache-0b213475-347e-42c9-aa16-0abd570d1a3e" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 677.361744] env[61852]: DEBUG nova.network.neutron [req-753f4712-6b16-4185-9fc0-1387feb7c55d req-593b12c6-9cd2-4d4e-a644-3ee76d1a7ddb service nova] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Refreshing network info cache for port e65885ca-7727-4b4f-b822-b30800674d2a {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 677.385022] env[61852]: DEBUG nova.scheduler.client.report [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 677.465870] env[61852]: DEBUG nova.compute.manager [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 677.497927] env[61852]: DEBUG nova.virt.hardware [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 677.497927] env[61852]: DEBUG nova.virt.hardware [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 677.497927] env[61852]: DEBUG nova.virt.hardware [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 677.498122] env[61852]: DEBUG nova.virt.hardware [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 677.498122] env[61852]: DEBUG nova.virt.hardware [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 677.498122] env[61852]: DEBUG nova.virt.hardware [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 677.498507] env[61852]: DEBUG nova.virt.hardware [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 677.498898] env[61852]: DEBUG nova.virt.hardware [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 677.499204] env[61852]: DEBUG nova.virt.hardware [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 677.499485] env[61852]: DEBUG nova.virt.hardware [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 677.499783] env[61852]: DEBUG nova.virt.hardware [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 677.501104] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d08a37d0-c4e9-451d-a648-7470ab7fa6c0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 677.511619] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a97b5237-affe-4be1-8e62-c1b80c0320be {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 677.655911] env[61852]: ERROR nova.compute.manager [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e65885ca-7727-4b4f-b822-b30800674d2a, please check neutron logs for more information.
[ 677.655911] env[61852]: ERROR nova.compute.manager Traceback (most recent call last):
[ 677.655911] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 677.655911] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance(
[ 677.655911] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 677.655911] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance(
[ 677.655911] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 677.655911] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception():
[ 677.655911] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 677.655911] env[61852]: ERROR nova.compute.manager self.force_reraise()
[ 677.655911] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 677.655911] env[61852]: ERROR nova.compute.manager raise self.value
[ 677.655911] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 677.655911] env[61852]: ERROR nova.compute.manager updated_port = self._update_port(
[ 677.655911] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 677.655911] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port)
[ 677.657533] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 677.657533] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id'])
[ 677.657533] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e65885ca-7727-4b4f-b822-b30800674d2a, please check neutron logs for more information.
[ 677.657533] env[61852]: ERROR nova.compute.manager
[ 677.657533] env[61852]: Traceback (most recent call last):
[ 677.657533] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait
[ 677.657533] env[61852]: listener.cb(fileno)
[ 677.657533] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 677.657533] env[61852]: result = function(*args, **kwargs)
[ 677.657533] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 677.657533] env[61852]: return func(*args, **kwargs)
[ 677.657533] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 677.657533] env[61852]: raise e
[ 677.657533] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 677.657533] env[61852]: nwinfo = self.network_api.allocate_for_instance(
[ 677.657533] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 677.657533] env[61852]: created_port_ids = self._update_ports_for_instance(
[ 677.657533] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 677.657533] env[61852]: with excutils.save_and_reraise_exception():
[ 677.657533] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 677.657533] env[61852]: self.force_reraise()
[ 677.657533] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 677.657533] env[61852]: raise self.value
[ 677.657533] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 677.657533] env[61852]: updated_port = self._update_port(
[ 677.657533] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 677.657533] env[61852]: _ensure_no_port_binding_failure(port)
[ 677.657533] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 677.657533] env[61852]: raise exception.PortBindingFailed(port_id=port['id'])
[ 677.658436] env[61852]: nova.exception.PortBindingFailed: Binding failed for port e65885ca-7727-4b4f-b822-b30800674d2a, please check neutron logs for more information.
[ 677.658436] env[61852]: Removing descriptor: 19
[ 677.658436] env[61852]: ERROR nova.compute.manager [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e65885ca-7727-4b4f-b822-b30800674d2a, please check neutron logs for more information.
[ 677.658436] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Traceback (most recent call last):
[ 677.658436] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources
[ 677.658436] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] yield resources
[ 677.658436] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 677.658436] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] self.driver.spawn(context, instance, image_meta,
[ 677.658436] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn
[ 677.658436] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 677.658436] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 677.658436] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] vm_ref = self.build_virtual_machine(instance,
[ 677.659043] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 677.659043] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] vif_infos = vmwarevif.get_vif_info(self._session,
[ 677.659043] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 677.659043] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] for vif in network_info:
[ 677.659043] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 677.659043] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] return self._sync_wrapper(fn, *args, **kwargs)
[ 677.659043] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 677.659043] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] self.wait()
[ 677.659043] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 677.659043] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] self[:] = self._gt.wait()
[ 677.659043] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 677.659043] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] return self._exit_event.wait()
[ 677.659043] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 677.659441] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] result = hub.switch()
[ 677.659441] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 677.659441] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] return self.greenlet.switch()
[ 677.659441] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 677.659441] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] result = function(*args, **kwargs)
[ 677.659441] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 677.659441] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] return func(*args, **kwargs)
[ 677.659441] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 677.659441] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] raise e
[ 677.659441] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 677.659441] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] nwinfo = self.network_api.allocate_for_instance(
[ 677.659441] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 677.659441] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] created_port_ids = self._update_ports_for_instance(
[ 677.659908] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 677.659908] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] with excutils.save_and_reraise_exception():
[ 677.659908] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 677.659908] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] self.force_reraise()
[ 677.659908] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 677.659908] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] raise self.value
[ 677.659908] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 677.659908] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] updated_port = self._update_port(
[ 677.659908] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 677.659908] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] _ensure_no_port_binding_failure(port)
[ 677.659908] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 677.659908] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] raise exception.PortBindingFailed(port_id=port['id'])
[ 677.660406] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] nova.exception.PortBindingFailed: Binding failed for port e65885ca-7727-4b4f-b822-b30800674d2a, please check neutron logs for more information.
[ 677.660406] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e]
[ 677.660406] env[61852]: INFO nova.compute.manager [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Terminating instance
[ 677.660406] env[61852]: DEBUG oslo_concurrency.lockutils [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Acquiring lock "refresh_cache-0b213475-347e-42c9-aa16-0abd570d1a3e" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 677.672661] env[61852]: DEBUG oslo_concurrency.lockutils [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 677.891382] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.455s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 677.893087] env[61852]: DEBUG nova.compute.manager [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 677.896594] env[61852]: DEBUG nova.network.neutron [req-753f4712-6b16-4185-9fc0-1387feb7c55d req-593b12c6-9cd2-4d4e-a644-3ee76d1a7ddb service nova] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 677.898486] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.011s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 678.020178] env[61852]: DEBUG nova.network.neutron [req-753f4712-6b16-4185-9fc0-1387feb7c55d req-593b12c6-9cd2-4d4e-a644-3ee76d1a7ddb service nova] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 678.403334] env[61852]: DEBUG nova.compute.utils [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}}
[ 678.407569] env[61852]: DEBUG nova.compute.manager [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 678.407742] env[61852]: DEBUG nova.network.neutron [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 678.476356] env[61852]: DEBUG nova.policy [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d49d5475d97488c9d7b0cf2b0da9d61', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ed7ffe792fcd4c879a0c76fd6c913b2d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}}
[ 678.524011] env[61852]: DEBUG oslo_concurrency.lockutils [req-753f4712-6b16-4185-9fc0-1387feb7c55d req-593b12c6-9cd2-4d4e-a644-3ee76d1a7ddb service nova] Releasing lock "refresh_cache-0b213475-347e-42c9-aa16-0abd570d1a3e" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 678.524453] env[61852]: DEBUG oslo_concurrency.lockutils [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Acquired lock "refresh_cache-0b213475-347e-42c9-aa16-0abd570d1a3e" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 678.524641] env[61852]: DEBUG nova.network.neutron [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512
tempest-ServerActionsTestOtherB-278465512-project-member] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 678.821124] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1cb899e-30c1-4c25-aba5-83137860f196 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.829873] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83eb7291-1717-45f9-aace-bdc52e63e960 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.861523] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c69369e4-6c5b-4e88-883c-d82188bd5768 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.870378] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36f26e93-1486-421c-8b89-278fd6e0d42d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.891295] env[61852]: DEBUG nova.compute.provider_tree [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 678.910072] env[61852]: DEBUG nova.compute.manager [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 679.043338] env[61852]: DEBUG nova.network.neutron [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 679.072255] env[61852]: DEBUG nova.network.neutron [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Successfully created port: efce19b8-8baf-4b37-bf7a-35689b6f6462 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 679.152150] env[61852]: DEBUG nova.network.neutron [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.393491] env[61852]: DEBUG nova.scheduler.client.report [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 679.399647] env[61852]: DEBUG nova.compute.manager [req-47c5fa1b-86c0-4dfd-888c-5a16fa956d45 req-20651d3c-d07f-4743-99e9-7f3e5c66be0c service nova] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Received event network-vif-deleted-e65885ca-7727-4b4f-b822-b30800674d2a {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 679.653848] env[61852]: DEBUG oslo_concurrency.lockutils [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Releasing lock "refresh_cache-0b213475-347e-42c9-aa16-0abd570d1a3e" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 679.654297] env[61852]: DEBUG nova.compute.manager [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 679.654493] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 679.654799] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a9602ca7-417d-40c8-bb03-f117ced6b98a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.667377] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a5d12d9-766d-417c-942f-288df85aba87 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.691770] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0b213475-347e-42c9-aa16-0abd570d1a3e could not be found. [ 679.691885] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 679.692089] env[61852]: INFO nova.compute.manager [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 679.692444] env[61852]: DEBUG oslo.service.loopingcall [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 679.692724] env[61852]: DEBUG nova.compute.manager [-] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 679.692827] env[61852]: DEBUG nova.network.neutron [-] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 679.712018] env[61852]: DEBUG nova.network.neutron [-] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Instance cache missing network info.
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 679.899340] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 679.899984] env[61852]: ERROR nova.compute.manager [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d60ae728-6058-4f11-b583-7270bc28e3f7, please check neutron logs for more information. [ 679.899984] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Traceback (most recent call last): [ 679.899984] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 679.899984] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] self.driver.spawn(context, instance, image_meta, [ 679.899984] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 679.899984] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] self._vmops.spawn(context, instance, image_meta, injected_files, [ 679.899984] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 679.899984] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] vm_ref = self.build_virtual_machine(instance, [ 679.899984] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 679.899984] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] vif_infos = vmwarevif.get_vif_info(self._session, [ 679.899984] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 679.900328] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] for vif in network_info: [ 679.900328] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 679.900328] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] return self._sync_wrapper(fn, *args, **kwargs) [ 679.900328] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 679.900328] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] self.wait() [ 679.900328] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 679.900328] env[61852]: 
ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] self[:] = self._gt.wait() [ 679.900328] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 679.900328] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] return self._exit_event.wait() [ 679.900328] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 679.900328] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] result = hub.switch() [ 679.900328] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 679.900328] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] return self.greenlet.switch() [ 679.900669] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 679.900669] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] result = function(*args, **kwargs) [ 679.900669] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 679.900669] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] return func(*args, **kwargs) [ 679.900669] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 679.900669] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] raise e [ 679.900669] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 679.900669] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] nwinfo = self.network_api.allocate_for_instance( [ 679.900669] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 679.900669] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] created_port_ids = self._update_ports_for_instance( [ 679.900669] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 679.900669] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] with excutils.save_and_reraise_exception(): [ 679.900669] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 679.900976] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] self.force_reraise() [ 679.900976] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", 
line 200, in force_reraise [ 679.900976] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] raise self.value [ 679.900976] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 679.900976] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] updated_port = self._update_port( [ 679.900976] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 679.900976] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] _ensure_no_port_binding_failure(port) [ 679.900976] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 679.900976] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] raise exception.PortBindingFailed(port_id=port['id']) [ 679.900976] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] nova.exception.PortBindingFailed: Binding failed for port d60ae728-6058-4f11-b583-7270bc28e3f7, please check neutron logs for more information. [ 679.900976] env[61852]: ERROR nova.compute.manager [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] [ 679.901429] env[61852]: DEBUG nova.compute.utils [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Binding failed for port d60ae728-6058-4f11-b583-7270bc28e3f7, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 679.902404] env[61852]: DEBUG nova.compute.manager [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Build of instance 394a7258-a9e0-4b16-a125-01e8cdfe7026 was re-scheduled: Binding failed for port d60ae728-6058-4f11-b583-7270bc28e3f7, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 679.903848] env[61852]: DEBUG nova.compute.manager [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 679.904102] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Acquiring lock "refresh_cache-394a7258-a9e0-4b16-a125-01e8cdfe7026" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 679.904255] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Acquired lock "refresh_cache-394a7258-a9e0-4b16-a125-01e8cdfe7026" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.904419] env[61852]: DEBUG nova.network.neutron [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 679.905454] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.586s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 679.907404] env[61852]: INFO nova.compute.claims [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 679.922903] env[61852]: DEBUG nova.compute.manager [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 679.953724] env[61852]: DEBUG nova.virt.hardware [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 679.953724] env[61852]: DEBUG nova.virt.hardware [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 679.953883] env[61852]: DEBUG nova.virt.hardware [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 679.953959] env[61852]: DEBUG nova.virt.hardware [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 679.954118] env[61852]: DEBUG nova.virt.hardware [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 679.954265] env[61852]: DEBUG nova.virt.hardware [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 679.954469] env[61852]: DEBUG nova.virt.hardware [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 679.954625] env[61852]: DEBUG nova.virt.hardware [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 679.954787] env[61852]: DEBUG nova.virt.hardware [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 679.954944] env[61852]: DEBUG nova.virt.hardware [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 679.955128] env[61852]: DEBUG nova.virt.hardware [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 679.956010] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cee70c0-eb7f-49bd-bd01-38b36bdb703f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.968214] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4d911c4-18ae-4f92-9d70-d679aefc92bd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.217477] env[61852]: DEBUG nova.network.neutron [-] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.512304] env[61852]: DEBUG nova.network.neutron [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 680.548257] env[61852]: ERROR nova.compute.manager [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port efce19b8-8baf-4b37-bf7a-35689b6f6462, please check neutron logs for more information. 
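The hardware.py records above walk the CPU topology search for the m1.nano flavor: with no explicit limits, flavor and image report 0:0:0, the maximums default to 65536 sockets/cores/threads, and a 1-vCPU request yields exactly one candidate topology, 1:1:1. A minimal sketch of that enumeration follows (not Nova's implementation; the product-equality rule and the capping of each axis at the vCPU count are simplifying assumptions):

    from dataclasses import dataclass
    from itertools import product

    @dataclass(frozen=True)
    class VirtCPUTopology:
        sockets: int
        cores: int
        threads: int

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Enumerate (sockets, cores, threads) triples whose product is exactly
        # the requested vCPU count; capping each axis at `vcpus` keeps the
        # search tiny without changing the result.
        found = [
            VirtCPUTopology(s, c, t)
            for s, c, t in product(range(1, min(max_sockets, vcpus) + 1),
                                   range(1, min(max_cores, vcpus) + 1),
                                   range(1, min(max_threads, vcpus) + 1))
            if s * c * t == vcpus
        ]
        if not found:
            raise ValueError(f'no topology fits {vcpus} vCPUs within the limits')
        return found

    # For the 1-vCPU flavor above this prints [VirtCPUTopology(sockets=1,
    # cores=1, threads=1)], matching "Got 1 possible topologies".
    print(possible_topologies(1))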
[ 680.548257] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 680.548257] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 680.548257] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 680.548257] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 680.548257] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 680.548257] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 680.548257] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 680.548257] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 680.548257] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 680.548257] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 680.548257] env[61852]: ERROR nova.compute.manager raise self.value [ 680.548257] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 680.548257] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 680.548257] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 680.548257] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 680.548743] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 680.548743] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 680.548743] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port efce19b8-8baf-4b37-bf7a-35689b6f6462, please check neutron logs for more information. 
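Every PortBindingFailed traceback in this log bottoms out in the same small guard in nova/network/neutron.py: after the port update, a port whose binding Neutron reports as failed is converted into the exception. A rough reconstruction from the frames and message above (the 'binding_failed' vif-type value is an assumption):

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed marker for a failed binding

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f'Binding failed for port {port_id}, '
                             f'please check neutron logs for more information.')

    def ensure_no_port_binding_failure(port):
        # Neutron records the binding outcome on the port itself; a failed
        # vif_type means no mechanism driver could bind the port on the
        # requested host, so the instance build cannot continue.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    try:
        ensure_no_port_binding_failure(
            {'id': 'efce19b8-8baf-4b37-bf7a-35689b6f6462',
             'binding:vif_type': VIF_TYPE_BINDING_FAILED})
    except PortBindingFailed as exc:
        print(exc)  # reproduces the message seen throughout this log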
[ 680.548743] env[61852]: ERROR nova.compute.manager [ 680.548743] env[61852]: Traceback (most recent call last): [ 680.548743] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 680.548743] env[61852]: listener.cb(fileno) [ 680.548743] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 680.548743] env[61852]: result = function(*args, **kwargs) [ 680.548743] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 680.548743] env[61852]: return func(*args, **kwargs) [ 680.548743] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 680.548743] env[61852]: raise e [ 680.548743] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 680.548743] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 680.548743] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 680.548743] env[61852]: created_port_ids = self._update_ports_for_instance( [ 680.548743] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 680.548743] env[61852]: with excutils.save_and_reraise_exception(): [ 680.548743] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 680.548743] env[61852]: self.force_reraise() [ 680.548743] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 680.548743] env[61852]: raise self.value [ 680.548743] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 680.548743] env[61852]: updated_port = self._update_port( [ 680.548743] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 680.548743] env[61852]: _ensure_no_port_binding_failure(port) [ 680.548743] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 680.548743] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 680.549485] env[61852]: nova.exception.PortBindingFailed: Binding failed for port efce19b8-8baf-4b37-bf7a-35689b6f6462, please check neutron logs for more information. [ 680.549485] env[61852]: Removing descriptor: 19 [ 680.549485] env[61852]: ERROR nova.compute.manager [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port efce19b8-8baf-4b37-bf7a-35689b6f6462, please check neutron logs for more information. 
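This second, bare traceback is the greenthread replay of the same failure: _allocate_network_async runs in an eventlet greenthread started during _build_resources, so the exception is stored on the thread and only resurfaces when the spawn path first iterates network_info and the async wrapper calls wait(). A stripped-down sketch of that deferral (eventlet is the library the frames name; the allocator and exception here are stand-ins):

    import eventlet

    class PortBindingFailed(Exception):
        pass

    def allocate_network_async():
        # Stand-in for ComputeManager._allocate_network_async: the Neutron
        # call fails and eventlet keeps the exception on the greenthread.
        raise PortBindingFailed('efce19b8-8baf-4b37-bf7a-35689b6f6462')

    gt = eventlet.spawn(allocate_network_async)

    # Block device and hypervisor preparation proceed in the meantime; the
    # failure stays invisible until someone needs the network info.
    try:
        network_info = gt.wait()  # wait() re-raises the stored exception here
    except PortBindingFailed as exc:
        print(f'network setup failed for port {exc}')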
[ 680.549485] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Traceback (most recent call last): [ 680.549485] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 680.549485] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] yield resources [ 680.549485] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 680.549485] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] self.driver.spawn(context, instance, image_meta, [ 680.549485] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 680.549485] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] self._vmops.spawn(context, instance, image_meta, injected_files, [ 680.549485] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 680.549485] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] vm_ref = self.build_virtual_machine(instance, [ 680.549795] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 680.549795] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] vif_infos = vmwarevif.get_vif_info(self._session, [ 680.549795] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 680.549795] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] for vif in network_info: [ 680.549795] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 680.549795] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] return self._sync_wrapper(fn, *args, **kwargs) [ 680.549795] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 680.549795] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] self.wait() [ 680.549795] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 680.549795] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] self[:] = self._gt.wait() [ 680.549795] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 680.549795] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] return self._exit_event.wait() [ 680.549795] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 680.550604] env[61852]: ERROR 
nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] result = hub.switch() [ 680.550604] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 680.550604] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] return self.greenlet.switch() [ 680.550604] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 680.550604] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] result = function(*args, **kwargs) [ 680.550604] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 680.550604] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] return func(*args, **kwargs) [ 680.550604] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 680.550604] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] raise e [ 680.550604] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 680.550604] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] nwinfo = self.network_api.allocate_for_instance( [ 680.550604] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 680.550604] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] created_port_ids = self._update_ports_for_instance( [ 680.551152] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 680.551152] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] with excutils.save_and_reraise_exception(): [ 680.551152] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 680.551152] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] self.force_reraise() [ 680.551152] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 680.551152] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] raise self.value [ 680.551152] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 680.551152] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] updated_port = self._update_port( [ 680.551152] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 680.551152] 
env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] _ensure_no_port_binding_failure(port) [ 680.551152] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 680.551152] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] raise exception.PortBindingFailed(port_id=port['id']) [ 680.551620] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] nova.exception.PortBindingFailed: Binding failed for port efce19b8-8baf-4b37-bf7a-35689b6f6462, please check neutron logs for more information. [ 680.551620] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] [ 680.551620] env[61852]: INFO nova.compute.manager [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Terminating instance [ 680.554164] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Acquiring lock "refresh_cache-f9e90a57-da19-4b1a-81cb-8a6433e09785" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 680.554867] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Acquired lock "refresh_cache-f9e90a57-da19-4b1a-81cb-8a6433e09785" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.554867] env[61852]: DEBUG nova.network.neutron [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 680.630900] env[61852]: DEBUG nova.network.neutron [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.721640] env[61852]: INFO nova.compute.manager [-] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Took 1.03 seconds to deallocate network for instance. 
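The claim bookkeeping around these failed builds is serialized with oslo.concurrency named locks: each 'Acquiring'/'acquired'/'released' line in this stretch, with its waited/held timings, comes from lockutils wrapping the resource tracker's compute_resources lock or a per-instance refresh_cache-<uuid> lock. A small sketch of the same pattern (the timing printout mimics the DEBUG lines; the wrapped claim function is hypothetical):

    import time
    from oslo_concurrency import lockutils

    def claim_with_lock(do_claim):
        # Run do_claim() under the in-process 'compute_resources' lock and
        # report waited/held durations like the lockutils lines above.
        t0 = time.monotonic()
        with lockutils.lock('compute_resources'):
            waited = time.monotonic() - t0
            t1 = time.monotonic()
            try:
                return do_claim()
            finally:
                held = time.monotonic() - t1
                print(f'Lock "compute_resources": waited {waited:.3f}s, '
                      f'held {held:.3f}s')

    claim_with_lock(lambda: time.sleep(0.01))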
[ 680.725869] env[61852]: DEBUG nova.compute.claims [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 680.726076] env[61852]: DEBUG oslo_concurrency.lockutils [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 681.078421] env[61852]: DEBUG nova.network.neutron [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 681.137017] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Releasing lock "refresh_cache-394a7258-a9e0-4b16-a125-01e8cdfe7026" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 681.137017] env[61852]: DEBUG nova.compute.manager [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 681.137017] env[61852]: DEBUG nova.compute.manager [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 681.137186] env[61852]: DEBUG nova.network.neutron [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 681.157709] env[61852]: DEBUG nova.network.neutron [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.166743] env[61852]: DEBUG nova.network.neutron [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 681.302422] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14398498-05f0-442c-8bae-70a1966533cd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.309182] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c20ee66-2a24-4602-a694-c275a47060bc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.339443] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57dddba7-c9bf-41ad-b60c-5fd84ae01039 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.349025] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d473f872-0231-4d29-a308-03ed2d8f1160 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.363775] env[61852]: DEBUG nova.compute.provider_tree [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 681.423988] env[61852]: DEBUG nova.compute.manager [req-fb5b4535-c98e-4c3b-8455-e8a0b5d324be req-8492c0d5-70cc-428c-97af-ba513a8bdc06 service nova] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Received event network-changed-efce19b8-8baf-4b37-bf7a-35689b6f6462 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 681.423988] env[61852]: DEBUG nova.compute.manager [req-fb5b4535-c98e-4c3b-8455-e8a0b5d324be req-8492c0d5-70cc-428c-97af-ba513a8bdc06 service nova] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Refreshing instance network info cache due to event network-changed-efce19b8-8baf-4b37-bf7a-35689b6f6462. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 681.423988] env[61852]: DEBUG oslo_concurrency.lockutils [req-fb5b4535-c98e-4c3b-8455-e8a0b5d324be req-8492c0d5-70cc-428c-97af-ba513a8bdc06 service nova] Acquiring lock "refresh_cache-f9e90a57-da19-4b1a-81cb-8a6433e09785" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 681.472891] env[61852]: DEBUG oslo_concurrency.lockutils [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Acquiring lock "cb50d964-5c0e-4cf3-b652-0f7b7a488f91" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 681.473132] env[61852]: DEBUG oslo_concurrency.lockutils [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Lock "cb50d964-5c0e-4cf3-b652-0f7b7a488f91" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 681.660728] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Releasing lock "refresh_cache-f9e90a57-da19-4b1a-81cb-8a6433e09785" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 681.661270] env[61852]: DEBUG nova.compute.manager [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Start destroying the instance on the hypervisor.
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 681.661603] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 681.661807] env[61852]: DEBUG oslo_concurrency.lockutils [req-fb5b4535-c98e-4c3b-8455-e8a0b5d324be req-8492c0d5-70cc-428c-97af-ba513a8bdc06 service nova] Acquired lock "refresh_cache-f9e90a57-da19-4b1a-81cb-8a6433e09785" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.661988] env[61852]: DEBUG nova.network.neutron [req-fb5b4535-c98e-4c3b-8455-e8a0b5d324be req-8492c0d5-70cc-428c-97af-ba513a8bdc06 service nova] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Refreshing network info cache for port efce19b8-8baf-4b37-bf7a-35689b6f6462 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 681.663078] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-04e3740a-3fc3-4313-8123-42073e096faa {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.670040] env[61852]: DEBUG nova.network.neutron [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.674109] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76744876-24eb-426c-8b41-f3089b297c78 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.703651] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f9e90a57-da19-4b1a-81cb-8a6433e09785 could not be found. [ 681.703886] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 681.704081] env[61852]: INFO nova.compute.manager [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Took 0.04 seconds to destroy the instance on the hypervisor. [ 681.704450] env[61852]: DEBUG oslo.service.loopingcall [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 681.704650] env[61852]: DEBUG nova.compute.manager [-] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 681.704650] env[61852]: DEBUG nova.network.neutron [-] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 681.728565] env[61852]: DEBUG nova.network.neutron [-] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 681.866616] env[61852]: DEBUG nova.scheduler.client.report [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 682.185510] env[61852]: DEBUG nova.network.neutron [req-fb5b4535-c98e-4c3b-8455-e8a0b5d324be req-8492c0d5-70cc-428c-97af-ba513a8bdc06 service nova] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 682.191898] env[61852]: INFO nova.compute.manager [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] [instance: 394a7258-a9e0-4b16-a125-01e8cdfe7026] Took 1.05 seconds to deallocate network for instance. [ 682.230054] env[61852]: DEBUG nova.network.neutron [-] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.252459] env[61852]: DEBUG nova.network.neutron [req-fb5b4535-c98e-4c3b-8455-e8a0b5d324be req-8492c0d5-70cc-428c-97af-ba513a8bdc06 service nova] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.373879] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.468s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 682.374478] env[61852]: DEBUG nova.compute.manager [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 682.377616] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.901s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 682.735052] env[61852]: INFO nova.compute.manager [-] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Took 1.03 seconds to deallocate network for instance. [ 682.740699] env[61852]: DEBUG nova.compute.claims [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 682.740789] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.758744] env[61852]: DEBUG oslo_concurrency.lockutils [req-fb5b4535-c98e-4c3b-8455-e8a0b5d324be req-8492c0d5-70cc-428c-97af-ba513a8bdc06 service nova] Releasing lock "refresh_cache-f9e90a57-da19-4b1a-81cb-8a6433e09785" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 682.759024] env[61852]: DEBUG nova.compute.manager [req-fb5b4535-c98e-4c3b-8455-e8a0b5d324be req-8492c0d5-70cc-428c-97af-ba513a8bdc06 service nova] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Received event network-vif-deleted-efce19b8-8baf-4b37-bf7a-35689b6f6462 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 682.888074] env[61852]: DEBUG nova.compute.utils [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 682.892408] env[61852]: DEBUG nova.compute.manager [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 682.892633] env[61852]: DEBUG nova.network.neutron [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 682.951973] env[61852]: DEBUG nova.policy [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'feb8889701b5465c8d9f6c8a56b05b54', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'caa8803d33d347659576719bc394599d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 683.223954] env[61852]: INFO nova.scheduler.client.report [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Deleted allocations for instance 394a7258-a9e0-4b16-a125-01e8cdfe7026 [ 683.247030] env[61852]: DEBUG nova.network.neutron [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Successfully created port: e9229b79-d2a3-4cfb-967e-c21509bb241e {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 683.300227] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf20a2d9-f4c0-46fe-bd60-b334d5c90d42 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.308308] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-035ad43a-71c3-426c-849d-5e62731d3079 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.344138] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbfabfc3-68c9-48de-86a6-31605143f9fd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.353128] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b88819c-50cf-4944-ab78-cb1485c398b9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.367726] env[61852]: DEBUG nova.compute.provider_tree [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 683.392072] env[61852]: DEBUG nova.compute.manager [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] [instance: 
593106da-0c81-448a-b3ba-fd6007dcdd98] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 683.418826] env[61852]: DEBUG oslo_concurrency.lockutils [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Acquiring lock "46ccab1f-b7af-49df-a38d-af1fa3bac486" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 683.419333] env[61852]: DEBUG oslo_concurrency.lockutils [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Lock "46ccab1f-b7af-49df-a38d-af1fa3bac486" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 683.733276] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6567982c-d8c5-4b1b-9fe5-418a05435b94 tempest-DeleteServersAdminTestJSON-1236310735 tempest-DeleteServersAdminTestJSON-1236310735-project-member] Lock "394a7258-a9e0-4b16-a125-01e8cdfe7026" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 126.402s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 683.871026] env[61852]: DEBUG nova.scheduler.client.report [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 683.978420] env[61852]: DEBUG nova.compute.manager [req-8fd823a6-e274-4cb6-a7fc-efd30f477f3d req-733d5b90-e9b1-4dcc-9fa9-4ed90893a78f service nova] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Received event network-changed-e9229b79-d2a3-4cfb-967e-c21509bb241e {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 683.978604] env[61852]: DEBUG nova.compute.manager [req-8fd823a6-e274-4cb6-a7fc-efd30f477f3d req-733d5b90-e9b1-4dcc-9fa9-4ed90893a78f service nova] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Refreshing instance network info cache due to event network-changed-e9229b79-d2a3-4cfb-967e-c21509bb241e.
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 683.978811] env[61852]: DEBUG oslo_concurrency.lockutils [req-8fd823a6-e274-4cb6-a7fc-efd30f477f3d req-733d5b90-e9b1-4dcc-9fa9-4ed90893a78f service nova] Acquiring lock "refresh_cache-593106da-0c81-448a-b3ba-fd6007dcdd98" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 683.978942] env[61852]: DEBUG oslo_concurrency.lockutils [req-8fd823a6-e274-4cb6-a7fc-efd30f477f3d req-733d5b90-e9b1-4dcc-9fa9-4ed90893a78f service nova] Acquired lock "refresh_cache-593106da-0c81-448a-b3ba-fd6007dcdd98" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.979106] env[61852]: DEBUG nova.network.neutron [req-8fd823a6-e274-4cb6-a7fc-efd30f477f3d req-733d5b90-e9b1-4dcc-9fa9-4ed90893a78f service nova] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Refreshing network info cache for port e9229b79-d2a3-4cfb-967e-c21509bb241e {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 684.160992] env[61852]: ERROR nova.compute.manager [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e9229b79-d2a3-4cfb-967e-c21509bb241e, please check neutron logs for more information. [ 684.160992] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 684.160992] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 684.160992] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 684.160992] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 684.160992] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 684.160992] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 684.160992] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 684.160992] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 684.160992] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 684.160992] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 684.160992] env[61852]: ERROR nova.compute.manager raise self.value [ 684.160992] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 684.160992] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 684.160992] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 684.160992] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 684.161748] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 684.161748] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 684.161748] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: 
Binding failed for port e9229b79-d2a3-4cfb-967e-c21509bb241e, please check neutron logs for more information. [ 684.161748] env[61852]: ERROR nova.compute.manager [ 684.161748] env[61852]: Traceback (most recent call last): [ 684.161748] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 684.161748] env[61852]: listener.cb(fileno) [ 684.161748] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 684.161748] env[61852]: result = function(*args, **kwargs) [ 684.161748] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 684.161748] env[61852]: return func(*args, **kwargs) [ 684.161748] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 684.161748] env[61852]: raise e [ 684.161748] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 684.161748] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 684.161748] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 684.161748] env[61852]: created_port_ids = self._update_ports_for_instance( [ 684.161748] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 684.161748] env[61852]: with excutils.save_and_reraise_exception(): [ 684.161748] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 684.161748] env[61852]: self.force_reraise() [ 684.161748] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 684.161748] env[61852]: raise self.value [ 684.161748] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 684.161748] env[61852]: updated_port = self._update_port( [ 684.161748] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 684.161748] env[61852]: _ensure_no_port_binding_failure(port) [ 684.161748] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 684.161748] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 684.162924] env[61852]: nova.exception.PortBindingFailed: Binding failed for port e9229b79-d2a3-4cfb-967e-c21509bb241e, please check neutron logs for more information. [ 684.162924] env[61852]: Removing descriptor: 19 [ 684.239678] env[61852]: DEBUG nova.compute.manager [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 684.375931] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.999s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 684.376647] env[61852]: ERROR nova.compute.manager [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 0c0dcdbf-4adf-4f9a-b47b-e0f74c024906, please check neutron logs for more information. [ 684.376647] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Traceback (most recent call last): [ 684.376647] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 684.376647] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] self.driver.spawn(context, instance, image_meta, [ 684.376647] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 684.376647] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] self._vmops.spawn(context, instance, image_meta, injected_files, [ 684.376647] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 684.376647] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] vm_ref = self.build_virtual_machine(instance, [ 684.376647] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 684.376647] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] vif_infos = vmwarevif.get_vif_info(self._session, [ 684.376647] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 684.376978] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] for vif in network_info: [ 684.376978] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 684.376978] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] return self._sync_wrapper(fn, *args, **kwargs) [ 684.376978] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 684.376978] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] self.wait() [ 684.376978] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 684.376978] env[61852]: ERROR nova.compute.manager [instance: 
fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] self[:] = self._gt.wait() [ 684.376978] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 684.376978] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] return self._exit_event.wait() [ 684.376978] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 684.376978] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] result = hub.switch() [ 684.376978] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 684.376978] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] return self.greenlet.switch() [ 684.377320] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 684.377320] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] result = function(*args, **kwargs) [ 684.377320] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 684.377320] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] return func(*args, **kwargs) [ 684.377320] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 684.377320] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] raise e [ 684.377320] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 684.377320] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] nwinfo = self.network_api.allocate_for_instance( [ 684.377320] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 684.377320] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] created_port_ids = self._update_ports_for_instance( [ 684.377320] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 684.377320] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] with excutils.save_and_reraise_exception(): [ 684.377320] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 684.377871] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] self.force_reraise() [ 684.377871] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 
684.377871] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] raise self.value [ 684.377871] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 684.377871] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] updated_port = self._update_port( [ 684.377871] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 684.377871] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] _ensure_no_port_binding_failure(port) [ 684.377871] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 684.377871] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] raise exception.PortBindingFailed(port_id=port['id']) [ 684.377871] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] nova.exception.PortBindingFailed: Binding failed for port 0c0dcdbf-4adf-4f9a-b47b-e0f74c024906, please check neutron logs for more information. [ 684.377871] env[61852]: ERROR nova.compute.manager [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] [ 684.378163] env[61852]: DEBUG nova.compute.utils [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Binding failed for port 0c0dcdbf-4adf-4f9a-b47b-e0f74c024906, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 684.378971] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.630s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 684.384778] env[61852]: DEBUG nova.compute.manager [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Build of instance fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79 was re-scheduled: Binding failed for port 0c0dcdbf-4adf-4f9a-b47b-e0f74c024906, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 684.385272] env[61852]: DEBUG nova.compute.manager [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 684.385745] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "refresh_cache-fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 684.385745] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquired lock "refresh_cache-fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.385928] env[61852]: DEBUG nova.network.neutron [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 684.402154] env[61852]: DEBUG nova.compute.manager [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 684.434969] env[61852]: DEBUG nova.virt.hardware [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=<?>,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-15T17:18:24Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 684.435249] env[61852]: DEBUG nova.virt.hardware [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 684.435405] env[61852]: DEBUG nova.virt.hardware [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 684.435614] env[61852]: DEBUG nova.virt.hardware [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 684.435775] env[61852]: DEBUG nova.virt.hardware [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 684.435920] env[61852]: DEBUG nova.virt.hardware [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 684.436215] env[61852]: DEBUG nova.virt.hardware [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 684.436422] env[61852]: DEBUG nova.virt.hardware [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 684.436642] env[61852]: DEBUG
nova.virt.hardware [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 684.436846] env[61852]: DEBUG nova.virt.hardware [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 684.437070] env[61852]: DEBUG nova.virt.hardware [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 684.438892] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06ac31f7-e3af-400e-b630-8b8285b7f9c3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.449092] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6d64e71-3a96-4e64-b3fe-a732c814efd9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.463021] env[61852]: ERROR nova.compute.manager [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e9229b79-d2a3-4cfb-967e-c21509bb241e, please check neutron logs for more information. 
[ 684.463021] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Traceback (most recent call last): [ 684.463021] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 684.463021] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] yield resources [ 684.463021] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 684.463021] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] self.driver.spawn(context, instance, image_meta, [ 684.463021] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 684.463021] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] self._vmops.spawn(context, instance, image_meta, injected_files, [ 684.463021] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 684.463021] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] vm_ref = self.build_virtual_machine(instance, [ 684.463021] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 684.463458] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] vif_infos = vmwarevif.get_vif_info(self._session, [ 684.463458] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 684.463458] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] for vif in network_info: [ 684.463458] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 684.463458] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] return self._sync_wrapper(fn, *args, **kwargs) [ 684.463458] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 684.463458] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] self.wait() [ 684.463458] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 684.463458] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] self[:] = self._gt.wait() [ 684.463458] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 684.463458] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] return self._exit_event.wait() [ 684.463458] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 684.463458] env[61852]: ERROR 
nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] current.throw(*self._exc) [ 684.463817] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 684.463817] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] result = function(*args, **kwargs) [ 684.463817] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 684.463817] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] return func(*args, **kwargs) [ 684.463817] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 684.463817] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] raise e [ 684.463817] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 684.463817] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] nwinfo = self.network_api.allocate_for_instance( [ 684.463817] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 684.463817] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] created_port_ids = self._update_ports_for_instance( [ 684.463817] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 684.463817] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] with excutils.save_and_reraise_exception(): [ 684.463817] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 684.464171] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] self.force_reraise() [ 684.464171] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 684.464171] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] raise self.value [ 684.464171] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 684.464171] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] updated_port = self._update_port( [ 684.464171] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 684.464171] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] _ensure_no_port_binding_failure(port) [ 684.464171] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
684.464171] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] raise exception.PortBindingFailed(port_id=port['id']) [ 684.464171] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] nova.exception.PortBindingFailed: Binding failed for port e9229b79-d2a3-4cfb-967e-c21509bb241e, please check neutron logs for more information. [ 684.464171] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] [ 684.464171] env[61852]: INFO nova.compute.manager [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Terminating instance [ 684.465396] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Acquiring lock "refresh_cache-593106da-0c81-448a-b3ba-fd6007dcdd98" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 684.499475] env[61852]: DEBUG nova.network.neutron [req-8fd823a6-e274-4cb6-a7fc-efd30f477f3d req-733d5b90-e9b1-4dcc-9fa9-4ed90893a78f service nova] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 684.593789] env[61852]: DEBUG nova.network.neutron [req-8fd823a6-e274-4cb6-a7fc-efd30f477f3d req-733d5b90-e9b1-4dcc-9fa9-4ed90893a78f service nova] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.760443] env[61852]: DEBUG oslo_concurrency.lockutils [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 684.910127] env[61852]: DEBUG nova.network.neutron [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 685.038696] env[61852]: DEBUG nova.network.neutron [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.096275] env[61852]: DEBUG oslo_concurrency.lockutils [req-8fd823a6-e274-4cb6-a7fc-efd30f477f3d req-733d5b90-e9b1-4dcc-9fa9-4ed90893a78f service nova] Releasing lock "refresh_cache-593106da-0c81-448a-b3ba-fd6007dcdd98" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 685.096650] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Acquired lock "refresh_cache-593106da-0c81-448a-b3ba-fd6007dcdd98" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.096825] env[61852]: DEBUG nova.network.neutron [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 685.215322] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79cc7926-6a4a-4d84-87e5-b3c15a1e20ad {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.224017] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-352dc041-1760-4465-b47e-57a66237f687 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.256035] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aae813b-2c4c-4c64-bcd0-7246e88101b2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.263939] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a2f0aa0-05cc-407e-b84b-fb3bcc57572c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.277412] env[61852]: DEBUG nova.compute.provider_tree [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 685.542844] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Releasing lock "refresh_cache-fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 685.543094] env[61852]: DEBUG nova.compute.manager [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 
tempest-ImagesTestJSON-651191963-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 685.543275] env[61852]: DEBUG nova.compute.manager [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 685.543436] env[61852]: DEBUG nova.network.neutron [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 685.558411] env[61852]: DEBUG nova.network.neutron [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 685.616262] env[61852]: DEBUG nova.network.neutron [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 685.693679] env[61852]: DEBUG nova.network.neutron [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.780415] env[61852]: DEBUG nova.scheduler.client.report [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 686.005355] env[61852]: DEBUG nova.compute.manager [req-d7a6330c-2ce9-45c2-8ddf-49a2321446e1 req-20d36532-de62-42b8-8399-a0496ea8cec0 service nova] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Received event network-vif-deleted-e9229b79-d2a3-4cfb-967e-c21509bb241e {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 686.061477] env[61852]: DEBUG nova.network.neutron [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info
/opt/stack/nova/nova/network/neutron.py:116}} [ 686.197086] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Releasing lock "refresh_cache-593106da-0c81-448a-b3ba-fd6007dcdd98" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 686.197395] env[61852]: DEBUG nova.compute.manager [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 686.197588] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 686.197893] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-591f7ec9-13de-450f-b341-9479be226ae4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.207304] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52a858ab-1e70-4c53-8dd1-be3d54855829 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.227944] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 593106da-0c81-448a-b3ba-fd6007dcdd98 could not be found. [ 686.228175] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 686.228353] env[61852]: INFO nova.compute.manager [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Took 0.03 seconds to destroy the instance on the hypervisor. [ 686.228676] env[61852]: DEBUG oslo.service.loopingcall [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 686.228804] env[61852]: DEBUG nova.compute.manager [-] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 686.228898] env[61852]: DEBUG nova.network.neutron [-] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 686.243259] env[61852]: DEBUG nova.network.neutron [-] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 686.286126] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.907s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 686.286740] env[61852]: ERROR nova.compute.manager [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7228a44c-d207-4fcd-9160-bd99a3dcbcad, please check neutron logs for more information. [ 686.286740] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Traceback (most recent call last): [ 686.286740] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 686.286740] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] self.driver.spawn(context, instance, image_meta, [ 686.286740] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 686.286740] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 686.286740] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 686.286740] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] vm_ref = self.build_virtual_machine(instance, [ 686.286740] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 686.286740] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] vif_infos = vmwarevif.get_vif_info(self._session, [ 686.286740] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 686.287210] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] for vif in network_info: [ 686.287210] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] 
File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 686.287210] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] return self._sync_wrapper(fn, *args, **kwargs) [ 686.287210] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 686.287210] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] self.wait() [ 686.287210] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 686.287210] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] self[:] = self._gt.wait() [ 686.287210] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 686.287210] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] return self._exit_event.wait() [ 686.287210] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 686.287210] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] current.throw(*self._exc) [ 686.287210] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 686.287210] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] result = function(*args, **kwargs) [ 686.287604] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 686.287604] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] return func(*args, **kwargs) [ 686.287604] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 686.287604] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] raise e [ 686.287604] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 686.287604] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] nwinfo = self.network_api.allocate_for_instance( [ 686.287604] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 686.287604] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] created_port_ids = self._update_ports_for_instance( [ 686.287604] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 686.287604] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] with excutils.save_and_reraise_exception(): [ 686.287604] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 686.287604] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] self.force_reraise() [ 686.287604] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 686.287984] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] raise self.value [ 686.287984] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 686.287984] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] updated_port = self._update_port( [ 686.287984] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 686.287984] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] _ensure_no_port_binding_failure(port) [ 686.287984] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 686.287984] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] raise exception.PortBindingFailed(port_id=port['id']) [ 686.287984] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] nova.exception.PortBindingFailed: Binding failed for port 7228a44c-d207-4fcd-9160-bd99a3dcbcad, please check neutron logs for more information. [ 686.287984] env[61852]: ERROR nova.compute.manager [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] [ 686.287984] env[61852]: DEBUG nova.compute.utils [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Binding failed for port 7228a44c-d207-4fcd-9160-bd99a3dcbcad, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 686.288586] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.924s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 686.292033] env[61852]: DEBUG nova.compute.manager [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Build of instance b566ea57-9b1a-4869-be7c-9ba579db25dc was re-scheduled: Binding failed for port 7228a44c-d207-4fcd-9160-bd99a3dcbcad, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 686.292468] env[61852]: DEBUG nova.compute.manager [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 686.292683] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Acquiring lock "refresh_cache-b566ea57-9b1a-4869-be7c-9ba579db25dc" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 686.292900] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Acquired lock "refresh_cache-b566ea57-9b1a-4869-be7c-9ba579db25dc" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.292972] env[61852]: DEBUG nova.network.neutron [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 686.564298] env[61852]: INFO nova.compute.manager [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79] Took 1.02 seconds to deallocate network for instance. [ 686.745209] env[61852]: DEBUG nova.network.neutron [-] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.814204] env[61852]: DEBUG nova.network.neutron [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 686.910178] env[61852]: DEBUG nova.network.neutron [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.137717] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e21a18cb-5519-4f74-9e9a-a44d1f1cf02d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.146244] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27f845c6-1cea-4d03-8b54-7451f577b6d5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.176620] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-586d9c2f-6bb6-4eec-8c27-4682819784d6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.183967] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dc5f9a8-758a-4ba2-97f3-e29e3696651e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.197895] env[61852]: DEBUG nova.compute.provider_tree [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 687.247762] env[61852]: INFO nova.compute.manager [-] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Took 1.02 seconds to deallocate network for instance. 
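The "compute_resources" records below (Aborting claim, Acquiring lock ... by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim", and the acquired/released lines with their waited/held timings) all come from oslo.concurrency's named-semaphore wrapper; the DEBUG lines are emitted by its inner function in lockutils.py. A minimal, self-contained sketch of the same pattern, with a hypothetical release_resources helper standing in for the tracker's real bookkeeping:

    from oslo_concurrency import lockutils

    def release_resources(instance):
        # Hypothetical stand-in for the tracker's real claim bookkeeping.
        print(f"freeing resources claimed by {instance}")

    # The decorator serializes callers on a process-local semaphore named
    # "compute_resources" and logs the same "acquired ... waited Ns" /
    # "released ... held Ns" DEBUG lines seen throughout this log.
    @lockutils.synchronized("compute_resources")
    def abort_instance_claim(instance):
        release_resources(instance)

    abort_instance_claim("593106da-0c81-448a-b3ba-fd6007dcdd98")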
[ 687.250139] env[61852]: DEBUG nova.compute.claims [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 687.250223] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 687.413107] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Releasing lock "refresh_cache-b566ea57-9b1a-4869-be7c-9ba579db25dc" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 687.413363] env[61852]: DEBUG nova.compute.manager [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 687.413547] env[61852]: DEBUG nova.compute.manager [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 687.413771] env[61852]: DEBUG nova.network.neutron [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 687.430513] env[61852]: DEBUG nova.network.neutron [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 687.595310] env[61852]: INFO nova.scheduler.client.report [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Deleted allocations for instance fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79 [ 687.700469] env[61852]: DEBUG nova.scheduler.client.report [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 687.934022] env[61852]: DEBUG nova.network.neutron [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.104084] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a92b196b-1ef5-475d-9f13-1c5b4b2e9738 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "fe9b7dd3-2cdb-49a4-b4fd-54a4e9654b79" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 125.851s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 688.205467] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.917s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 688.206187] env[61852]: ERROR nova.compute.manager [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a8ac770f-e371-4335-b908-9d47067c7fd3, please check neutron logs for more information. 
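The traceback that follows bottoms out in neutron.py's _ensure_no_port_binding_failure (line 294 in the frames), which converts Neutron's failed-binding marker on a port into the PortBindingFailed exception repeated throughout this log. A condensed, self-contained rendering of that check, assuming the standard 'binding:vif_type' key and 'binding_failed' marker:

    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, "
                             "please check neutron logs for more information.")

    def _ensure_no_port_binding_failure(port):
        # Neutron reports a port whose binding could not be completed by
        # setting binding:vif_type to 'binding_failed'; Nova raises on it.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    try:
        _ensure_no_port_binding_failure(
            {'id': 'a8ac770f-e371-4335-b908-9d47067c7fd3',
             'binding:vif_type': VIF_TYPE_BINDING_FAILED})
    except PortBindingFailed as exc:
        print(exc)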
[ 688.206187] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Traceback (most recent call last): [ 688.206187] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 688.206187] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] self.driver.spawn(context, instance, image_meta, [ 688.206187] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 688.206187] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] self._vmops.spawn(context, instance, image_meta, injected_files, [ 688.206187] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 688.206187] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] vm_ref = self.build_virtual_machine(instance, [ 688.206187] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 688.206187] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] vif_infos = vmwarevif.get_vif_info(self._session, [ 688.206187] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 688.206628] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] for vif in network_info: [ 688.206628] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 688.206628] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] return self._sync_wrapper(fn, *args, **kwargs) [ 688.206628] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 688.206628] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] self.wait() [ 688.206628] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 688.206628] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] self[:] = self._gt.wait() [ 688.206628] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 688.206628] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] return self._exit_event.wait() [ 688.206628] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 688.206628] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] current.throw(*self._exc) [ 688.206628] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
688.206628] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] result = function(*args, **kwargs) [ 688.207025] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 688.207025] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] return func(*args, **kwargs) [ 688.207025] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 688.207025] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] raise e [ 688.207025] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 688.207025] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] nwinfo = self.network_api.allocate_for_instance( [ 688.207025] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 688.207025] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] created_port_ids = self._update_ports_for_instance( [ 688.207025] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 688.207025] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] with excutils.save_and_reraise_exception(): [ 688.207025] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 688.207025] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] self.force_reraise() [ 688.207025] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 688.207851] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] raise self.value [ 688.207851] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 688.207851] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] updated_port = self._update_port( [ 688.207851] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 688.207851] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] _ensure_no_port_binding_failure(port) [ 688.207851] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 688.207851] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] raise exception.PortBindingFailed(port_id=port['id']) [ 688.207851] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] nova.exception.PortBindingFailed: Binding failed for 
port a8ac770f-e371-4335-b908-9d47067c7fd3, please check neutron logs for more information. [ 688.207851] env[61852]: ERROR nova.compute.manager [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] [ 688.207851] env[61852]: DEBUG nova.compute.utils [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Binding failed for port a8ac770f-e371-4335-b908-9d47067c7fd3, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 688.208118] env[61852]: DEBUG oslo_concurrency.lockutils [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.400s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 688.209569] env[61852]: INFO nova.compute.claims [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 688.212126] env[61852]: DEBUG nova.compute.manager [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Build of instance 144d5486-d438-4bca-9b68-c414cc1f4659 was re-scheduled: Binding failed for port a8ac770f-e371-4335-b908-9d47067c7fd3, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 688.212572] env[61852]: DEBUG nova.compute.manager [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 688.212797] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Acquiring lock "refresh_cache-144d5486-d438-4bca-9b68-c414cc1f4659" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 688.212940] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Acquired lock "refresh_cache-144d5486-d438-4bca-9b68-c414cc1f4659" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.213105] env[61852]: DEBUG nova.network.neutron [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 688.436687] env[61852]: INFO nova.compute.manager [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] [instance: b566ea57-9b1a-4869-be7c-9ba579db25dc] Took 1.02 seconds to deallocate network for instance. [ 688.606687] env[61852]: DEBUG nova.compute.manager [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 688.733076] env[61852]: DEBUG nova.network.neutron [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 688.817830] env[61852]: DEBUG nova.network.neutron [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.132119] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 689.321673] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Releasing lock "refresh_cache-144d5486-d438-4bca-9b68-c414cc1f4659" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 689.322256] env[61852]: DEBUG nova.compute.manager [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 689.322256] env[61852]: DEBUG nova.compute.manager [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 689.322256] env[61852]: DEBUG nova.network.neutron [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 689.337940] env[61852]: DEBUG nova.network.neutron [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 689.467262] env[61852]: INFO nova.scheduler.client.report [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Deleted allocations for instance b566ea57-9b1a-4869-be7c-9ba579db25dc [ 689.536566] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc2dc87d-d9ec-4fc2-b8da-be0a6748b4a9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.547021] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-642e5534-5ab9-47a9-9887-2a38233aa555 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.577727] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9413f3cc-79f7-472f-adeb-b83d86fb28d9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.585386] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ea3929a-ec99-400d-af35-6069bf155675 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.598503] env[61852]: DEBUG nova.compute.provider_tree [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 689.840164] env[61852]: DEBUG nova.network.neutron [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.978074] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1e6a58b3-c76f-4629-8c2e-b82f54611fd9 tempest-ImagesOneServerNegativeTestJSON-55407056 tempest-ImagesOneServerNegativeTestJSON-55407056-project-member] Lock "b566ea57-9b1a-4869-be7c-9ba579db25dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 125.133s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 690.104021] env[61852]: DEBUG nova.scheduler.client.report [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 690.168510] env[61852]: DEBUG oslo_concurrency.lockutils [None 
req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "c94066d5-2e5f-4059-bdc5-385d517f1d84" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.168747] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "c94066d5-2e5f-4059-bdc5-385d517f1d84" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 690.342916] env[61852]: INFO nova.compute.manager [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] [instance: 144d5486-d438-4bca-9b68-c414cc1f4659] Took 1.02 seconds to deallocate network for instance. [ 690.482028] env[61852]: DEBUG nova.compute.manager [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 690.608517] env[61852]: DEBUG oslo_concurrency.lockutils [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.400s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 690.609117] env[61852]: DEBUG nova.compute.manager [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 690.611772] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.892s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 690.615071] env[61852]: INFO nova.compute.claims [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 691.005315] env[61852]: DEBUG oslo_concurrency.lockutils [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 691.119026] env[61852]: DEBUG nova.compute.utils [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 691.122455] env[61852]: DEBUG nova.compute.manager [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 691.122540] env[61852]: DEBUG nova.network.neutron [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 48b40da3-1efc-4557-a791-e88158338aec] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 691.196894] env[61852]: DEBUG nova.policy [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f8780b1622ce4d98a88fa92bee3e4bd2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6d65efc960c14799bcf1b26ecdf9c912', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 691.376723] env[61852]: INFO nova.scheduler.client.report [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Deleted allocations for instance 144d5486-d438-4bca-9b68-c414cc1f4659 [ 691.633360] env[61852]: DEBUG nova.compute.manager [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 691.784912] env[61852]: DEBUG nova.network.neutron [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Successfully created port: ed4d0907-7acf-46b5-b753-014f317badbd {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 691.885453] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d283d01-ac2b-4512-b674-a1af7476553f tempest-ServerActionsTestJSON-372287286 tempest-ServerActionsTestJSON-372287286-project-member] Lock "144d5486-d438-4bca-9b68-c414cc1f4659" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 125.591s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 692.045310] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Acquiring lock "f18906e9-67b3-4537-9169-9d275e2ec4e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 692.045550] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Lock "f18906e9-67b3-4537-9169-9d275e2ec4e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s 
{{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 692.052283] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f4c43c0-1701-4775-afce-b26f83c570fe {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.060119] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0023a661-d271-420f-9ef7-fddcd8b2f68c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.099762] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf006411-6f6e-4211-b9ad-d6701252990a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.107308] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ca1ba35-6cf1-4744-b544-6ea1efc2cb94 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.122101] env[61852]: DEBUG nova.compute.provider_tree [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 692.292999] env[61852]: DEBUG oslo_concurrency.lockutils [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Acquiring lock "8897a654-6805-45b0-b12b-16f7981d33ad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 692.293413] env[61852]: DEBUG oslo_concurrency.lockutils [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Lock "8897a654-6805-45b0-b12b-16f7981d33ad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 692.390259] env[61852]: DEBUG nova.compute.manager [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 692.624843] env[61852]: DEBUG nova.scheduler.client.report [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 692.646176] env[61852]: DEBUG nova.compute.manager [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 692.676721] env[61852]: DEBUG nova.virt.hardware [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 692.676990] env[61852]: DEBUG nova.virt.hardware [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 692.677234] env[61852]: DEBUG nova.virt.hardware [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 692.677371] env[61852]: DEBUG nova.virt.hardware [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 692.677517] env[61852]: DEBUG nova.virt.hardware [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 692.677662] env[61852]: DEBUG 
nova.virt.hardware [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 692.677867] env[61852]: DEBUG nova.virt.hardware [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 692.679970] env[61852]: DEBUG nova.virt.hardware [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 692.680290] env[61852]: DEBUG nova.virt.hardware [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 692.680515] env[61852]: DEBUG nova.virt.hardware [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 692.681090] env[61852]: DEBUG nova.virt.hardware [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 692.681820] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f59875df-d559-4cdf-912f-0876bb498630 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.693050] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd5a9998-5bbe-48d6-957e-335d1cedbfdb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.916096] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 692.941762] env[61852]: DEBUG nova.compute.manager [req-25d9e496-263c-4f1e-a8cd-d4edc0eb3d2b req-7bb11e8b-f781-44b2-82e9-21f4c191e90f service nova] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Received event network-changed-ed4d0907-7acf-46b5-b753-014f317badbd {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 692.942041] env[61852]: DEBUG nova.compute.manager [req-25d9e496-263c-4f1e-a8cd-d4edc0eb3d2b req-7bb11e8b-f781-44b2-82e9-21f4c191e90f service nova] 
[instance: 48b40da3-1efc-4557-a791-e88158338aec] Refreshing instance network info cache due to event network-changed-ed4d0907-7acf-46b5-b753-014f317badbd. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 692.942407] env[61852]: DEBUG oslo_concurrency.lockutils [req-25d9e496-263c-4f1e-a8cd-d4edc0eb3d2b req-7bb11e8b-f781-44b2-82e9-21f4c191e90f service nova] Acquiring lock "refresh_cache-48b40da3-1efc-4557-a791-e88158338aec" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 692.942407] env[61852]: DEBUG oslo_concurrency.lockutils [req-25d9e496-263c-4f1e-a8cd-d4edc0eb3d2b req-7bb11e8b-f781-44b2-82e9-21f4c191e90f service nova] Acquired lock "refresh_cache-48b40da3-1efc-4557-a791-e88158338aec" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.942716] env[61852]: DEBUG nova.network.neutron [req-25d9e496-263c-4f1e-a8cd-d4edc0eb3d2b req-7bb11e8b-f781-44b2-82e9-21f4c191e90f service nova] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Refreshing network info cache for port ed4d0907-7acf-46b5-b753-014f317badbd {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 693.131252] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.519s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 693.131859] env[61852]: DEBUG nova.compute.manager [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 693.134563] env[61852]: DEBUG oslo_concurrency.lockutils [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.462s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 693.135973] env[61852]: INFO nova.compute.claims [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 693.207777] env[61852]: ERROR nova.compute.manager [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ed4d0907-7acf-46b5-b753-014f317badbd, please check neutron logs for more information. 
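The traceback that follows also shows the oslo.utils save_and_reraise_exception pattern (the __exit__ / force_reraise() / raise self.value frames): cleanup runs in the except path, then the original exception is re-raised unchanged. A minimal sketch with hypothetical update_port and cleanup_ports helpers:

    from oslo_utils import excutils

    def update_port(port):
        # Hypothetical; stands in for Nova's _update_port, which may raise.
        raise RuntimeError(f"binding failed for {port}")

    def cleanup_ports(ports):
        # Hypothetical rollback of ports created before the failure.
        print(f"rolling back {len(ports)} port(s)")

    def update_ports_for_instance(ports):
        try:
            for port in ports:
                update_port(port)
        except Exception:
            # The context manager stores the in-flight exception, runs the
            # body, then re-raises the original (the "raise self.value"
            # frame seen in the tracebacks).
            with excutils.save_and_reraise_exception():
                cleanup_ports(ports)

    try:
        update_ports_for_instance(['ed4d0907-7acf-46b5-b753-014f317badbd'])
    except RuntimeError as exc:
        print(f"re-raised: {exc}")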
[ 693.207777] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 693.207777] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 693.207777] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 693.207777] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 693.207777] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 693.207777] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 693.207777] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 693.207777] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 693.207777] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 693.207777] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 693.207777] env[61852]: ERROR nova.compute.manager raise self.value [ 693.207777] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 693.207777] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 693.207777] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 693.207777] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 693.208328] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 693.208328] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 693.208328] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ed4d0907-7acf-46b5-b753-014f317badbd, please check neutron logs for more information. 
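The second traceback just below, entered via eventlet's hub (listener.cb(fileno)), is the same failure at its origin inside the _allocate_network_async greenthread; the copy above surfaced when spawn iterated network_info and _sync_wrapper called wait() on that thread (the self._gt.wait() / current.throw frames). A minimal sketch of that propagation, assuming eventlet is available:

    import eventlet

    def allocate_network():
        # Stands in for _allocate_network_async; raises inside the thread.
        raise RuntimeError("binding failed")

    gt = eventlet.spawn(allocate_network)
    try:
        # wait() re-throws the exception stored by the failed greenthread,
        # so the same error appears a second time in the waiting context.
        gt.wait()
    except RuntimeError as exc:
        print(f"caught in the waiter: {exc}")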
[ 693.208328] env[61852]: ERROR nova.compute.manager [ 693.208328] env[61852]: Traceback (most recent call last): [ 693.208328] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 693.208328] env[61852]: listener.cb(fileno) [ 693.208328] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 693.208328] env[61852]: result = function(*args, **kwargs) [ 693.208328] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 693.208328] env[61852]: return func(*args, **kwargs) [ 693.208328] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 693.208328] env[61852]: raise e [ 693.208328] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 693.208328] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 693.208328] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 693.208328] env[61852]: created_port_ids = self._update_ports_for_instance( [ 693.208328] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 693.208328] env[61852]: with excutils.save_and_reraise_exception(): [ 693.208328] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 693.208328] env[61852]: self.force_reraise() [ 693.208328] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 693.208328] env[61852]: raise self.value [ 693.208328] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 693.208328] env[61852]: updated_port = self._update_port( [ 693.208328] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 693.208328] env[61852]: _ensure_no_port_binding_failure(port) [ 693.208328] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 693.208328] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 693.210274] env[61852]: nova.exception.PortBindingFailed: Binding failed for port ed4d0907-7acf-46b5-b753-014f317badbd, please check neutron logs for more information. [ 693.210274] env[61852]: Removing descriptor: 19 [ 693.210274] env[61852]: ERROR nova.compute.manager [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ed4d0907-7acf-46b5-b753-014f317badbd, please check neutron logs for more information. 
[ 693.210274] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] Traceback (most recent call last): [ 693.210274] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 693.210274] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] yield resources [ 693.210274] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 693.210274] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] self.driver.spawn(context, instance, image_meta, [ 693.210274] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 693.210274] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] self._vmops.spawn(context, instance, image_meta, injected_files, [ 693.210274] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 693.210274] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] vm_ref = self.build_virtual_machine(instance, [ 693.210743] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 693.210743] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] vif_infos = vmwarevif.get_vif_info(self._session, [ 693.210743] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 693.210743] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] for vif in network_info: [ 693.210743] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 693.210743] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] return self._sync_wrapper(fn, *args, **kwargs) [ 693.210743] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 693.210743] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] self.wait() [ 693.210743] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 693.210743] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] self[:] = self._gt.wait() [ 693.210743] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 693.210743] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] return self._exit_event.wait() [ 693.210743] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 693.211154] env[61852]: ERROR 
nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] result = hub.switch() [ 693.211154] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 693.211154] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] return self.greenlet.switch() [ 693.211154] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 693.211154] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] result = function(*args, **kwargs) [ 693.211154] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 693.211154] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] return func(*args, **kwargs) [ 693.211154] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 693.211154] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] raise e [ 693.211154] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 693.211154] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] nwinfo = self.network_api.allocate_for_instance( [ 693.211154] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 693.211154] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] created_port_ids = self._update_ports_for_instance( [ 693.211575] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 693.211575] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] with excutils.save_and_reraise_exception(): [ 693.211575] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 693.211575] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] self.force_reraise() [ 693.211575] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 693.211575] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] raise self.value [ 693.211575] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 693.211575] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] updated_port = self._update_port( [ 693.211575] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 693.211575] 
env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] _ensure_no_port_binding_failure(port) [ 693.211575] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 693.211575] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] raise exception.PortBindingFailed(port_id=port['id']) [ 693.212093] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] nova.exception.PortBindingFailed: Binding failed for port ed4d0907-7acf-46b5-b753-014f317badbd, please check neutron logs for more information. [ 693.212093] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] [ 693.212093] env[61852]: INFO nova.compute.manager [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Terminating instance [ 693.212093] env[61852]: DEBUG oslo_concurrency.lockutils [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Acquiring lock "refresh_cache-48b40da3-1efc-4557-a791-e88158338aec" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 693.464742] env[61852]: DEBUG nova.network.neutron [req-25d9e496-263c-4f1e-a8cd-d4edc0eb3d2b req-7bb11e8b-f781-44b2-82e9-21f4c191e90f service nova] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 693.582718] env[61852]: DEBUG nova.network.neutron [req-25d9e496-263c-4f1e-a8cd-d4edc0eb3d2b req-7bb11e8b-f781-44b2-82e9-21f4c191e90f service nova] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.641608] env[61852]: DEBUG nova.compute.utils [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 693.644917] env[61852]: DEBUG nova.compute.manager [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 693.645087] env[61852]: DEBUG nova.network.neutron [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 693.718884] env[61852]: DEBUG nova.policy [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b14c40221eb54eef808cb36a5a744e2a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '725ea50888c642fda66d87e41472c8d7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 694.088790] env[61852]: DEBUG oslo_concurrency.lockutils [req-25d9e496-263c-4f1e-a8cd-d4edc0eb3d2b req-7bb11e8b-f781-44b2-82e9-21f4c191e90f service nova] Releasing lock "refresh_cache-48b40da3-1efc-4557-a791-e88158338aec" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 694.089210] env[61852]: DEBUG oslo_concurrency.lockutils [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Acquired lock "refresh_cache-48b40da3-1efc-4557-a791-e88158338aec" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.089413] env[61852]: DEBUG nova.network.neutron [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 694.148018] env[61852]: DEBUG nova.compute.manager [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 694.220118] env[61852]: DEBUG nova.network.neutron [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Successfully created port: 9073c855-90cf-41ad-8ef6-a60909d19c57 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 694.541706] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-084afab0-dee4-472c-9943-0ea3d070f717 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.551017] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7815173-83ad-44ef-962f-4ae95563644e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.582489] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-601c8316-dc7c-441e-bb36-d5c04d51cfa3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.589893] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4beb903-f674-40e1-91e1-8ab57650d69d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.609495] env[61852]: DEBUG nova.compute.provider_tree [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 694.636697] env[61852]: DEBUG nova.network.neutron [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 694.874298] env[61852]: DEBUG nova.network.neutron [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.112901] env[61852]: DEBUG nova.compute.manager [req-d845308c-2540-4408-a43d-fa7031021720 req-940edfc8-f2cf-44d5-9dd3-f2bec24b01b1 service nova] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Received event network-vif-deleted-ed4d0907-7acf-46b5-b753-014f317badbd {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 695.118266] env[61852]: DEBUG nova.scheduler.client.report [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 695.162916] env[61852]: DEBUG nova.compute.manager [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 695.198652] env[61852]: DEBUG nova.virt.hardware [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=<?>,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-15T17:18:24Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 695.198891] env[61852]: DEBUG nova.virt.hardware [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 695.199102] env[61852]: DEBUG nova.virt.hardware [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 695.199323] env[61852]: DEBUG nova.virt.hardware [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 695.200283] env[61852]: DEBUG nova.virt.hardware [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 695.200396] env[61852]: DEBUG nova.virt.hardware [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 695.203864] env[61852]: DEBUG nova.virt.hardware [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 695.203864] env[61852]: DEBUG nova.virt.hardware [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 695.203864] env[61852]: DEBUG nova.virt.hardware [None
req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 695.203864] env[61852]: DEBUG nova.virt.hardware [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 695.203864] env[61852]: DEBUG nova.virt.hardware [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 695.204169] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7687d2eb-14b4-4d1a-9a21-21777ab1d932 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.211876] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-444ee506-97e6-4e72-bed8-60c8fa505f9d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.375909] env[61852]: DEBUG oslo_concurrency.lockutils [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Releasing lock "refresh_cache-48b40da3-1efc-4557-a791-e88158338aec" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 695.376380] env[61852]: DEBUG nova.compute.manager [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 695.376576] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 695.376912] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0ad3d102-f6d4-4890-9add-332514c1c3f4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.386923] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f599e9c0-90ed-4f06-aa5a-bd7e5f8728df {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.413409] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 48b40da3-1efc-4557-a791-e88158338aec could not be found. 
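The nova.virt.hardware DEBUG run above walks through vCPU-topology selection for the m1.nano flavor: with no preferences from flavor or image (0:0:0) and the default maxima (65536 sockets/cores/threads), the only factorization of 1 vCPU is 1:1:1, so exactly one topology is possible and it is also the sorted winner. A toy re-implementation of that enumeration, for illustration only (Nova's real _get_possible_cpu_topologies also applies preferred orderings and NUMA constraints):

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Yield (sockets, cores, threads) triples whose product is vcpus."""
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        yield (sockets, cores, threads)

    print(list(possible_topologies(1)))  # [(1, 1, 1)] -- matches the log
    print(list(possible_topologies(4)))  # multiple candidates for 4 vCPUs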
[ 695.413663] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 695.413867] env[61852]: INFO nova.compute.manager [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Took 0.04 seconds to destroy the instance on the hypervisor. [ 695.414102] env[61852]: DEBUG oslo.service.loopingcall [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 695.414329] env[61852]: DEBUG nova.compute.manager [-] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 695.414426] env[61852]: DEBUG nova.network.neutron [-] [instance: 48b40da3-1efc-4557-a791-e88158338aec] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 695.450602] env[61852]: ERROR nova.compute.manager [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9073c855-90cf-41ad-8ef6-a60909d19c57, please check neutron logs for more information. 
[ 695.450602] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 695.450602] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 695.450602] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 695.450602] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 695.450602] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 695.450602] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 695.450602] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 695.450602] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 695.450602] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 695.450602] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 695.450602] env[61852]: ERROR nova.compute.manager raise self.value [ 695.450602] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 695.450602] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 695.450602] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 695.450602] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 695.451091] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 695.451091] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 695.451091] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9073c855-90cf-41ad-8ef6-a60909d19c57, please check neutron logs for more information. 
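Both tracebacks bottom out in _ensure_no_port_binding_failure(), and the reason is worth spelling out: Neutron does not return an API error for an unbindable port. It returns the updated port with binding:vif_type set to 'binding_failed', so Nova has to inspect that field itself. Roughly the shape of the check, written here as a self-contained sketch rather than a verbatim copy of nova/network/neutron.py:

    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, "
                             "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        # a "successful" port update from Neutron can still carry a failed
        # binding; only this field reveals it
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    ensure_no_port_binding_failure({
        'id': '9073c855-90cf-41ad-8ef6-a60909d19c57',
        'binding:vif_type': VIF_TYPE_BINDING_FAILED,
    })  # raises PortBindingFailed, as in the log

This is why the error message points at the Neutron logs: the binding was attempted and rejected by Neutron's mechanism drivers, and only neutron-server knows why it could not bind the port to this host.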
[ 695.451091] env[61852]: ERROR nova.compute.manager [ 695.451091] env[61852]: Traceback (most recent call last): [ 695.451091] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 695.451091] env[61852]: listener.cb(fileno) [ 695.451091] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 695.451091] env[61852]: result = function(*args, **kwargs) [ 695.451091] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 695.451091] env[61852]: return func(*args, **kwargs) [ 695.451091] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 695.451091] env[61852]: raise e [ 695.451091] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 695.451091] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 695.451091] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 695.451091] env[61852]: created_port_ids = self._update_ports_for_instance( [ 695.451091] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 695.451091] env[61852]: with excutils.save_and_reraise_exception(): [ 695.451091] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 695.451091] env[61852]: self.force_reraise() [ 695.451091] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 695.451091] env[61852]: raise self.value [ 695.451091] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 695.451091] env[61852]: updated_port = self._update_port( [ 695.451091] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 695.451091] env[61852]: _ensure_no_port_binding_failure(port) [ 695.451091] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 695.451091] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 695.451903] env[61852]: nova.exception.PortBindingFailed: Binding failed for port 9073c855-90cf-41ad-8ef6-a60909d19c57, please check neutron logs for more information. [ 695.451903] env[61852]: Removing descriptor: 19 [ 695.451903] env[61852]: ERROR nova.compute.manager [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9073c855-90cf-41ad-8ef6-a60909d19c57, please check neutron logs for more information. 
[ 695.451903] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Traceback (most recent call last): [ 695.451903] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 695.451903] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] yield resources [ 695.451903] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 695.451903] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] self.driver.spawn(context, instance, image_meta, [ 695.451903] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 695.451903] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 695.451903] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 695.451903] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] vm_ref = self.build_virtual_machine(instance, [ 695.452237] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 695.452237] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] vif_infos = vmwarevif.get_vif_info(self._session, [ 695.452237] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 695.452237] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] for vif in network_info: [ 695.452237] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 695.452237] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] return self._sync_wrapper(fn, *args, **kwargs) [ 695.452237] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 695.452237] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] self.wait() [ 695.452237] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 695.452237] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] self[:] = self._gt.wait() [ 695.452237] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 695.452237] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] return self._exit_event.wait() [ 695.452237] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 695.452731] env[61852]: ERROR 
nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] result = hub.switch() [ 695.452731] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 695.452731] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] return self.greenlet.switch() [ 695.452731] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 695.452731] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] result = function(*args, **kwargs) [ 695.452731] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 695.452731] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] return func(*args, **kwargs) [ 695.452731] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 695.452731] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] raise e [ 695.452731] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 695.452731] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] nwinfo = self.network_api.allocate_for_instance( [ 695.452731] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 695.452731] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] created_port_ids = self._update_ports_for_instance( [ 695.453072] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 695.453072] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] with excutils.save_and_reraise_exception(): [ 695.453072] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 695.453072] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] self.force_reraise() [ 695.453072] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 695.453072] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] raise self.value [ 695.453072] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 695.453072] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] updated_port = self._update_port( [ 695.453072] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 695.453072] 
env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] _ensure_no_port_binding_failure(port) [ 695.453072] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 695.453072] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] raise exception.PortBindingFailed(port_id=port['id']) [ 695.453475] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] nova.exception.PortBindingFailed: Binding failed for port 9073c855-90cf-41ad-8ef6-a60909d19c57, please check neutron logs for more information. [ 695.453475] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] [ 695.453475] env[61852]: INFO nova.compute.manager [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Terminating instance [ 695.456216] env[61852]: DEBUG nova.network.neutron [-] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 695.456447] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Acquiring lock "refresh_cache-29cb49fe-627a-4f0f-919b-58f764cd63d0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 695.456601] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Acquired lock "refresh_cache-29cb49fe-627a-4f0f-919b-58f764cd63d0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.456757] env[61852]: DEBUG nova.network.neutron [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 695.623798] env[61852]: DEBUG oslo_concurrency.lockutils [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.489s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.624345] env[61852]: DEBUG nova.compute.manager [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 695.627424] env[61852]: DEBUG oslo_concurrency.lockutils [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.901s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 695.960706] env[61852]: DEBUG nova.network.neutron [-] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.039538] env[61852]: DEBUG nova.network.neutron [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 696.132532] env[61852]: DEBUG nova.compute.utils [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 696.138958] env[61852]: DEBUG nova.compute.manager [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 696.139156] env[61852]: DEBUG nova.network.neutron [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] [instance: beffa800-ff93-4230-be14-f2b906666cc0] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 696.177895] env[61852]: DEBUG nova.policy [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '78cfb26764024e058824fc75cfdf7369', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ed3e7df5a616492e93f37f646ca11ba0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 696.215568] env[61852]: DEBUG nova.network.neutron [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.466168] env[61852]: INFO nova.compute.manager [-] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Took 1.05 seconds to deallocate network for instance. 
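The nova.policy DEBUG line above is a soft authorization check: network:attach_external_network fails for a token holding only the reader/member roles, so the port is simply not treated as an external-network attachment and the request continues; the failure is informational, not the cause of the build errors in this log. A hedged sketch of the equivalent oslo.policy call follows; only the rule name and the credentials come from the log line, while the Enforcer setup and the 'role:admin' default are illustrative assumptions:

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    # assumed default for the sketch; Nova registers its own defaults
    enforcer.register_default(policy.RuleDefault(
        'network:attach_external_network', 'role:admin'))

    creds = {'user_id': 'b14c40221eb54eef808cb36a5a744e2a',
             'project_id': '725ea50888c642fda66d87e41472c8d7',
             'roles': ['reader', 'member']}

    # do_raise=False mirrors the soft check: a failed evaluation is logged
    # at DEBUG (as above) instead of aborting the request
    allowed = enforcer.authorize('network:attach_external_network',
                                 {}, creds, do_raise=False)
    print(allowed)  # False -- member-only credentials lack role:admin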
[ 696.468203] env[61852]: DEBUG nova.compute.claims [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 696.468370] env[61852]: DEBUG oslo_concurrency.lockutils [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 696.509857] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-206e9ace-363f-46bc-84ab-b55e7634849c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.517479] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd902024-08e2-4f02-bc86-d8801d6292e9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.551213] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abd7562f-9a82-4b9f-9d1d-1e5a146070eb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.560361] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af5d8077-6ba1-46bd-984e-7a64b59ac2d8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.577542] env[61852]: DEBUG nova.compute.provider_tree [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 696.589624] env[61852]: DEBUG nova.network.neutron [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Successfully created port: 6eada10d-828c-47ae-98fc-a2c58e5caf26 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 696.639805] env[61852]: DEBUG nova.compute.manager [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 696.718050] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Releasing lock "refresh_cache-29cb49fe-627a-4f0f-919b-58f764cd63d0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 696.718486] env[61852]: DEBUG nova.compute.manager [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 696.718675] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 696.718999] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2c89cb16-ca39-4044-bf50-80329933fbc8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.730248] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7530aceb-2e9d-4757-90e7-8c84e13f4f3f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.754015] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 29cb49fe-627a-4f0f-919b-58f764cd63d0 could not be found. [ 696.754259] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 696.754434] env[61852]: INFO nova.compute.manager [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Took 0.04 seconds to destroy the instance on the hypervisor. [ 696.754666] env[61852]: DEBUG oslo.service.loopingcall [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 696.754872] env[61852]: DEBUG nova.compute.manager [-] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 696.754961] env[61852]: DEBUG nova.network.neutron [-] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 696.771190] env[61852]: DEBUG nova.network.neutron [-] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 697.081553] env[61852]: DEBUG nova.scheduler.client.report [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 697.150950] env[61852]: DEBUG nova.compute.manager [req-0dbceba5-72ec-4fbe-84a0-7501cfb9faed req-d41a7f4c-a7be-4c40-a444-062cae3840e4 service nova] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Received event network-changed-9073c855-90cf-41ad-8ef6-a60909d19c57 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 697.151221] env[61852]: DEBUG nova.compute.manager [req-0dbceba5-72ec-4fbe-84a0-7501cfb9faed req-d41a7f4c-a7be-4c40-a444-062cae3840e4 service nova] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Refreshing instance network info cache due to event network-changed-9073c855-90cf-41ad-8ef6-a60909d19c57. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 697.151508] env[61852]: DEBUG oslo_concurrency.lockutils [req-0dbceba5-72ec-4fbe-84a0-7501cfb9faed req-d41a7f4c-a7be-4c40-a444-062cae3840e4 service nova] Acquiring lock "refresh_cache-29cb49fe-627a-4f0f-919b-58f764cd63d0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.151665] env[61852]: DEBUG oslo_concurrency.lockutils [req-0dbceba5-72ec-4fbe-84a0-7501cfb9faed req-d41a7f4c-a7be-4c40-a444-062cae3840e4 service nova] Acquired lock "refresh_cache-29cb49fe-627a-4f0f-919b-58f764cd63d0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.152157] env[61852]: DEBUG nova.network.neutron [req-0dbceba5-72ec-4fbe-84a0-7501cfb9faed req-d41a7f4c-a7be-4c40-a444-062cae3840e4 service nova] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Refreshing network info cache for port 9073c855-90cf-41ad-8ef6-a60909d19c57 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 697.274269] env[61852]: DEBUG nova.network.neutron [-] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.588972] env[61852]: DEBUG oslo_concurrency.lockutils [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.961s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 697.589637] env[61852]: ERROR nova.compute.manager [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e65885ca-7727-4b4f-b822-b30800674d2a, please check neutron logs for more information. 
[ 697.589637] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Traceback (most recent call last): [ 697.589637] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 697.589637] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] self.driver.spawn(context, instance, image_meta, [ 697.589637] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 697.589637] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 697.589637] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 697.589637] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] vm_ref = self.build_virtual_machine(instance, [ 697.589637] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 697.589637] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] vif_infos = vmwarevif.get_vif_info(self._session, [ 697.589637] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 697.589965] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] for vif in network_info: [ 697.589965] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 697.589965] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] return self._sync_wrapper(fn, *args, **kwargs) [ 697.589965] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 697.589965] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] self.wait() [ 697.589965] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 697.589965] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] self[:] = self._gt.wait() [ 697.589965] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 697.589965] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] return self._exit_event.wait() [ 697.589965] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 697.589965] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] result = hub.switch() [ 697.589965] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
697.589965] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] return self.greenlet.switch() [ 697.590338] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 697.590338] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] result = function(*args, **kwargs) [ 697.590338] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 697.590338] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] return func(*args, **kwargs) [ 697.590338] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 697.590338] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] raise e [ 697.590338] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 697.590338] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] nwinfo = self.network_api.allocate_for_instance( [ 697.590338] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 697.590338] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] created_port_ids = self._update_ports_for_instance( [ 697.590338] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 697.590338] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] with excutils.save_and_reraise_exception(): [ 697.590338] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 697.590721] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] self.force_reraise() [ 697.590721] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 697.590721] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] raise self.value [ 697.590721] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 697.590721] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] updated_port = self._update_port( [ 697.590721] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 697.590721] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] _ensure_no_port_binding_failure(port) [ 697.590721] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 697.590721] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] raise exception.PortBindingFailed(port_id=port['id']) [ 697.590721] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] nova.exception.PortBindingFailed: Binding failed for port e65885ca-7727-4b4f-b822-b30800674d2a, please check neutron logs for more information. [ 697.590721] env[61852]: ERROR nova.compute.manager [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] [ 697.591061] env[61852]: DEBUG nova.compute.utils [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Binding failed for port e65885ca-7727-4b4f-b822-b30800674d2a, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 697.591717] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.851s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 697.598362] env[61852]: DEBUG nova.compute.manager [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Build of instance 0b213475-347e-42c9-aa16-0abd570d1a3e was re-scheduled: Binding failed for port e65885ca-7727-4b4f-b822-b30800674d2a, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 697.598837] env[61852]: DEBUG nova.compute.manager [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 697.599085] env[61852]: DEBUG oslo_concurrency.lockutils [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Acquiring lock "refresh_cache-0b213475-347e-42c9-aa16-0abd570d1a3e" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.599279] env[61852]: DEBUG oslo_concurrency.lockutils [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Acquired lock "refresh_cache-0b213475-347e-42c9-aa16-0abd570d1a3e" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.599448] env[61852]: DEBUG nova.network.neutron [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 697.655831] env[61852]: DEBUG nova.compute.manager [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 697.688931] env[61852]: DEBUG nova.network.neutron [req-0dbceba5-72ec-4fbe-84a0-7501cfb9faed req-d41a7f4c-a7be-4c40-a444-062cae3840e4 service nova] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 697.700182] env[61852]: DEBUG nova.virt.hardware [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 697.700414] env[61852]: DEBUG nova.virt.hardware [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 697.700414] env[61852]: DEBUG nova.virt.hardware [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 697.701361] env[61852]: DEBUG nova.virt.hardware [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 697.701361] env[61852]: DEBUG nova.virt.hardware [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 697.701361] env[61852]: DEBUG nova.virt.hardware [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 697.701532] env[61852]: DEBUG nova.virt.hardware [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 697.701658] env[61852]: DEBUG nova.virt.hardware [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 697.701836] env[61852]: DEBUG nova.virt.hardware [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 697.701995] env[61852]: DEBUG nova.virt.hardware [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 697.702180] env[61852]: DEBUG nova.virt.hardware [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 697.703078] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-356433bf-c83f-42b5-b63e-982035915998 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.711620] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01de4ce1-65dc-4244-ae5b-992857924e5b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.777463] env[61852]: INFO nova.compute.manager [-] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Took 1.02 seconds to deallocate network for instance. [ 697.781123] env[61852]: DEBUG nova.compute.claims [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 697.781123] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 697.787035] env[61852]: DEBUG nova.network.neutron [req-0dbceba5-72ec-4fbe-84a0-7501cfb9faed req-d41a7f4c-a7be-4c40-a444-062cae3840e4 service nova] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.924181] env[61852]: ERROR nova.compute.manager [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6eada10d-828c-47ae-98fc-a2c58e5caf26, please check neutron logs for more information. 
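The nova.virt.hardware lines above show how the m1.nano flavor (vcpus=1, no explicit flavor or image limits) collapses to a single candidate topology. A simplified sketch of the enumeration step, assuming only the sockets*cores*threads == vcpus factorisation rule and the default 65536 per-dimension maximums seen in the log (the real _get_possible_cpu_topologies also honours flavor and image preferences when sorting):

from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Every factorisation of the vCPU count into sockets x cores x threads
    # that stays under the per-dimension maximums is a valid topology.
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    yield VirtCPUTopology(s, c, t)

print(list(possible_topologies(1)))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)] -- "Got 1 possible topologies"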
[ 697.924181] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 697.924181] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 697.924181] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 697.924181] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 697.924181] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 697.924181] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 697.924181] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 697.924181] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 697.924181] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 697.924181] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 697.924181] env[61852]: ERROR nova.compute.manager raise self.value [ 697.924181] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 697.924181] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 697.924181] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 697.924181] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 697.924713] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 697.924713] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 697.924713] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6eada10d-828c-47ae-98fc-a2c58e5caf26, please check neutron logs for more information. 
[ 697.924713] env[61852]: ERROR nova.compute.manager [ 697.924713] env[61852]: Traceback (most recent call last): [ 697.924713] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 697.924713] env[61852]: listener.cb(fileno) [ 697.924713] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 697.924713] env[61852]: result = function(*args, **kwargs) [ 697.924713] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 697.924713] env[61852]: return func(*args, **kwargs) [ 697.924713] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 697.924713] env[61852]: raise e [ 697.924713] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 697.924713] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 697.924713] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 697.924713] env[61852]: created_port_ids = self._update_ports_for_instance( [ 697.924713] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 697.924713] env[61852]: with excutils.save_and_reraise_exception(): [ 697.924713] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 697.924713] env[61852]: self.force_reraise() [ 697.924713] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 697.924713] env[61852]: raise self.value [ 697.924713] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 697.924713] env[61852]: updated_port = self._update_port( [ 697.924713] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 697.924713] env[61852]: _ensure_no_port_binding_failure(port) [ 697.924713] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 697.924713] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 697.925588] env[61852]: nova.exception.PortBindingFailed: Binding failed for port 6eada10d-828c-47ae-98fc-a2c58e5caf26, please check neutron logs for more information. [ 697.925588] env[61852]: Removing descriptor: 19 [ 697.925588] env[61852]: ERROR nova.compute.manager [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6eada10d-828c-47ae-98fc-a2c58e5caf26, please check neutron logs for more information. 
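Every traceback in this burst funnels through oslo_utils.excutils.save_and_reraise_exception, which is why the __exit__, force_reraise, and raise self.value frames repeat in each dump: _update_ports_for_instance uses it so partially created ports can be cleaned up before the original PortBindingFailed propagates. A minimal usage sketch against the real oslo.utils API; the failing call and the rollback helper below are stand-ins, not Nova code:

from oslo_utils import excutils

def _rollback(ports):
    # Stand-in for deleting the ports created before the failure.
    print('rolling back', ports)

def update_ports(ports):
    try:
        raise RuntimeError('binding failed')  # stand-in for the Neutron call
    except Exception:
        with excutils.save_and_reraise_exception():
            # Cleanup runs here; on leaving the context manager the saved
            # exception is re-raised via force_reraise(), as in the frames above.
            _rollback(ports)

Setting ctxt.reraise = False on the context manager (with ... as ctxt:) suppresses the re-raise when the cleanup path decides the error is recoverable.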
[ 697.925588] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] Traceback (most recent call last): [ 697.925588] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 697.925588] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] yield resources [ 697.925588] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 697.925588] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] self.driver.spawn(context, instance, image_meta, [ 697.925588] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 697.925588] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 697.925588] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 697.925588] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] vm_ref = self.build_virtual_machine(instance, [ 697.925950] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 697.925950] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] vif_infos = vmwarevif.get_vif_info(self._session, [ 697.925950] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 697.925950] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] for vif in network_info: [ 697.925950] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 697.925950] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] return self._sync_wrapper(fn, *args, **kwargs) [ 697.925950] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 697.925950] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] self.wait() [ 697.925950] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 697.925950] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] self[:] = self._gt.wait() [ 697.925950] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 697.925950] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] return self._exit_event.wait() [ 697.925950] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 697.926355] env[61852]: ERROR 
nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] result = hub.switch() [ 697.926355] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 697.926355] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] return self.greenlet.switch() [ 697.926355] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 697.926355] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] result = function(*args, **kwargs) [ 697.926355] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 697.926355] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] return func(*args, **kwargs) [ 697.926355] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 697.926355] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] raise e [ 697.926355] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 697.926355] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] nwinfo = self.network_api.allocate_for_instance( [ 697.926355] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 697.926355] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] created_port_ids = self._update_ports_for_instance( [ 697.926783] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 697.926783] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] with excutils.save_and_reraise_exception(): [ 697.926783] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 697.926783] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] self.force_reraise() [ 697.926783] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 697.926783] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] raise self.value [ 697.926783] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 697.926783] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] updated_port = self._update_port( [ 697.926783] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 697.926783] 
env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] _ensure_no_port_binding_failure(port) [ 697.926783] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 697.926783] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] raise exception.PortBindingFailed(port_id=port['id']) [ 697.927149] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] nova.exception.PortBindingFailed: Binding failed for port 6eada10d-828c-47ae-98fc-a2c58e5caf26, please check neutron logs for more information. [ 697.927149] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] [ 697.927149] env[61852]: INFO nova.compute.manager [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Terminating instance [ 697.927351] env[61852]: DEBUG oslo_concurrency.lockutils [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Acquiring lock "refresh_cache-beffa800-ff93-4230-be14-f2b906666cc0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.927536] env[61852]: DEBUG oslo_concurrency.lockutils [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Acquired lock "refresh_cache-beffa800-ff93-4230-be14-f2b906666cc0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.927678] env[61852]: DEBUG nova.network.neutron [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 698.121671] env[61852]: DEBUG nova.network.neutron [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 698.213563] env[61852]: DEBUG nova.network.neutron [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.290044] env[61852]: DEBUG oslo_concurrency.lockutils [req-0dbceba5-72ec-4fbe-84a0-7501cfb9faed req-d41a7f4c-a7be-4c40-a444-062cae3840e4 service nova] Releasing lock "refresh_cache-29cb49fe-627a-4f0f-919b-58f764cd63d0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 698.290044] env[61852]: DEBUG nova.compute.manager [req-0dbceba5-72ec-4fbe-84a0-7501cfb9faed req-d41a7f4c-a7be-4c40-a444-062cae3840e4 service nova] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Received event network-vif-deleted-9073c855-90cf-41ad-8ef6-a60909d19c57 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 698.452779] env[61852]: DEBUG nova.network.neutron [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 698.494932] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8add5e6-f439-43ad-b806-4b122aa4562f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.503586] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52c26653-11d1-445b-85a6-c28179578d2d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.539385] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e48ce85-4921-462d-a120-58e970246984 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.547565] env[61852]: DEBUG nova.network.neutron [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.550268] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81c56c8f-7304-422f-8583-712b8ae8c83c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.565174] env[61852]: DEBUG nova.compute.provider_tree [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 698.714795] env[61852]: DEBUG oslo_concurrency.lockutils [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 
tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Releasing lock "refresh_cache-0b213475-347e-42c9-aa16-0abd570d1a3e" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 698.715069] env[61852]: DEBUG nova.compute.manager [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 698.715230] env[61852]: DEBUG nova.compute.manager [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 698.715400] env[61852]: DEBUG nova.network.neutron [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 698.736199] env[61852]: DEBUG nova.network.neutron [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 699.056838] env[61852]: DEBUG oslo_concurrency.lockutils [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Releasing lock "refresh_cache-beffa800-ff93-4230-be14-f2b906666cc0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 699.056838] env[61852]: DEBUG nova.compute.manager [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 699.056838] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 699.057103] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c5044226-4195-478f-aca5-2960d7182e33 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.066141] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-045f7d63-8179-4c35-852a-2db57be72db3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.076777] env[61852]: DEBUG nova.scheduler.client.report [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 699.092645] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance beffa800-ff93-4230-be14-f2b906666cc0 could not be found. [ 699.092860] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 699.093049] env[61852]: INFO nova.compute.manager [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Took 0.04 seconds to destroy the instance on the hypervisor. [ 699.093291] env[61852]: DEBUG oslo.service.loopingcall [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 699.094122] env[61852]: DEBUG nova.compute.manager [-] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 699.094215] env[61852]: DEBUG nova.network.neutron [-] [instance: beffa800-ff93-4230-be14-f2b906666cc0] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 699.114276] env[61852]: DEBUG nova.network.neutron [-] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 699.173489] env[61852]: DEBUG nova.compute.manager [req-e3695218-2fc4-4e50-aa76-109046a59b20 req-9b4cc37a-2f8f-46bd-807b-76a2b3bda6ed service nova] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Received event network-changed-6eada10d-828c-47ae-98fc-a2c58e5caf26 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 699.173770] env[61852]: DEBUG nova.compute.manager [req-e3695218-2fc4-4e50-aa76-109046a59b20 req-9b4cc37a-2f8f-46bd-807b-76a2b3bda6ed service nova] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Refreshing instance network info cache due to event network-changed-6eada10d-828c-47ae-98fc-a2c58e5caf26. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 699.173894] env[61852]: DEBUG oslo_concurrency.lockutils [req-e3695218-2fc4-4e50-aa76-109046a59b20 req-9b4cc37a-2f8f-46bd-807b-76a2b3bda6ed service nova] Acquiring lock "refresh_cache-beffa800-ff93-4230-be14-f2b906666cc0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 699.174047] env[61852]: DEBUG oslo_concurrency.lockutils [req-e3695218-2fc4-4e50-aa76-109046a59b20 req-9b4cc37a-2f8f-46bd-807b-76a2b3bda6ed service nova] Acquired lock "refresh_cache-beffa800-ff93-4230-be14-f2b906666cc0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.174207] env[61852]: DEBUG nova.network.neutron [req-e3695218-2fc4-4e50-aa76-109046a59b20 req-9b4cc37a-2f8f-46bd-807b-76a2b3bda6ed service nova] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Refreshing network info cache for port 6eada10d-828c-47ae-98fc-a2c58e5caf26 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 699.239366] env[61852]: DEBUG nova.network.neutron [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.583239] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.991s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 699.583239] env[61852]: ERROR nova.compute.manager [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 
tempest-ServersAdminNegativeTestJSON-887081051-project-member] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port efce19b8-8baf-4b37-bf7a-35689b6f6462, please check neutron logs for more information. [ 699.583239] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Traceback (most recent call last): [ 699.583239] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 699.583239] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] self.driver.spawn(context, instance, image_meta, [ 699.583239] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 699.583239] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] self._vmops.spawn(context, instance, image_meta, injected_files, [ 699.583239] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 699.583239] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] vm_ref = self.build_virtual_machine(instance, [ 699.583550] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 699.583550] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] vif_infos = vmwarevif.get_vif_info(self._session, [ 699.583550] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 699.583550] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] for vif in network_info: [ 699.583550] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 699.583550] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] return self._sync_wrapper(fn, *args, **kwargs) [ 699.583550] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 699.583550] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] self.wait() [ 699.583550] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 699.583550] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] self[:] = self._gt.wait() [ 699.583550] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 699.583550] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] return self._exit_event.wait() [ 699.583550] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 699.583915] env[61852]: ERROR 
nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] result = hub.switch() [ 699.583915] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 699.583915] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] return self.greenlet.switch() [ 699.583915] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 699.583915] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] result = function(*args, **kwargs) [ 699.583915] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 699.583915] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] return func(*args, **kwargs) [ 699.583915] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 699.583915] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] raise e [ 699.583915] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 699.583915] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] nwinfo = self.network_api.allocate_for_instance( [ 699.583915] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 699.583915] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] created_port_ids = self._update_ports_for_instance( [ 699.584285] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 699.584285] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] with excutils.save_and_reraise_exception(): [ 699.584285] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 699.584285] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] self.force_reraise() [ 699.584285] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 699.584285] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] raise self.value [ 699.584285] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 699.584285] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] updated_port = self._update_port( [ 699.584285] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 699.584285] 
env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] _ensure_no_port_binding_failure(port) [ 699.584285] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 699.584285] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] raise exception.PortBindingFailed(port_id=port['id']) [ 699.584635] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] nova.exception.PortBindingFailed: Binding failed for port efce19b8-8baf-4b37-bf7a-35689b6f6462, please check neutron logs for more information. [ 699.584635] env[61852]: ERROR nova.compute.manager [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] [ 699.584635] env[61852]: DEBUG nova.compute.utils [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Binding failed for port efce19b8-8baf-4b37-bf7a-35689b6f6462, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 699.584901] env[61852]: DEBUG oslo_concurrency.lockutils [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.825s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 699.586849] env[61852]: INFO nova.compute.claims [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 699.589998] env[61852]: DEBUG nova.compute.manager [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Build of instance f9e90a57-da19-4b1a-81cb-8a6433e09785 was re-scheduled: Binding failed for port efce19b8-8baf-4b37-bf7a-35689b6f6462, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 699.590446] env[61852]: DEBUG nova.compute.manager [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 699.590678] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Acquiring lock "refresh_cache-f9e90a57-da19-4b1a-81cb-8a6433e09785" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 699.590836] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Acquired lock "refresh_cache-f9e90a57-da19-4b1a-81cb-8a6433e09785" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.591015] env[61852]: DEBUG nova.network.neutron [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 699.617474] env[61852]: DEBUG nova.network.neutron [-] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.697869] env[61852]: DEBUG nova.network.neutron [req-e3695218-2fc4-4e50-aa76-109046a59b20 req-9b4cc37a-2f8f-46bd-807b-76a2b3bda6ed service nova] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 699.746625] env[61852]: INFO nova.compute.manager [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] [instance: 0b213475-347e-42c9-aa16-0abd570d1a3e] Took 1.03 seconds to deallocate network for instance. [ 699.789829] env[61852]: DEBUG nova.network.neutron [req-e3695218-2fc4-4e50-aa76-109046a59b20 req-9b4cc37a-2f8f-46bd-807b-76a2b3bda6ed service nova] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.106774] env[61852]: DEBUG nova.network.neutron [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 700.119532] env[61852]: INFO nova.compute.manager [-] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Took 1.03 seconds to deallocate network for instance. 
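The repeated "Inventory has not changed" entries report the same inventory for provider f818062c-7b17-4bd0-94af-192a674543c3. Placement's usual capacity formula per resource class is (total - reserved) * allocation_ratio, with max_unit capping any single allocation; a quick check of the figures logged above (illustrative arithmetic, not Placement code):

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 139},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable={capacity:g}, per-allocation cap={inv['max_unit']}")
# VCPU: schedulable=192, per-allocation cap=16
# MEMORY_MB: schedulable=196078, per-allocation cap=65530
# DISK_GB: schedulable=400, per-allocation cap=139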
[ 700.121475] env[61852]: DEBUG nova.compute.claims [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 700.121660] env[61852]: DEBUG oslo_concurrency.lockutils [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 700.147967] env[61852]: DEBUG nova.network.neutron [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.294591] env[61852]: DEBUG oslo_concurrency.lockutils [req-e3695218-2fc4-4e50-aa76-109046a59b20 req-9b4cc37a-2f8f-46bd-807b-76a2b3bda6ed service nova] Releasing lock "refresh_cache-beffa800-ff93-4230-be14-f2b906666cc0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 700.294903] env[61852]: DEBUG nova.compute.manager [req-e3695218-2fc4-4e50-aa76-109046a59b20 req-9b4cc37a-2f8f-46bd-807b-76a2b3bda6ed service nova] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Received event network-vif-deleted-6eada10d-828c-47ae-98fc-a2c58e5caf26 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 700.649995] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Releasing lock "refresh_cache-f9e90a57-da19-4b1a-81cb-8a6433e09785" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 700.650255] env[61852]: DEBUG nova.compute.manager [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 700.650508] env[61852]: DEBUG nova.compute.manager [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 700.650688] env[61852]: DEBUG nova.network.neutron [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 700.673649] env[61852]: DEBUG nova.network.neutron [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 700.775858] env[61852]: INFO nova.scheduler.client.report [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 tempest-ServerActionsTestOtherB-278465512-project-member] Deleted allocations for instance 0b213475-347e-42c9-aa16-0abd570d1a3e [ 700.916867] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4ae3b5-dd89-4dfb-b9c5-4574fa85a12e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.924924] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17e368e0-cd32-4b8e-8f70-d7a0ccc57685 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.956936] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a0f6f1b-669f-4fa9-9797-aa1ebdeb4dc3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.964830] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0efcb8ae-5b7e-4575-a1f4-ea0e5718e859 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.978039] env[61852]: DEBUG nova.compute.provider_tree [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 701.175830] env[61852]: DEBUG nova.network.neutron [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.287352] env[61852]: DEBUG oslo_concurrency.lockutils [None req-38d7a35f-a014-41be-8ae9-f2bd1e89f064 tempest-ServerActionsTestOtherB-278465512 
tempest-ServerActionsTestOtherB-278465512-project-member] Lock "0b213475-347e-42c9-aa16-0abd570d1a3e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 134.982s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 701.482271] env[61852]: DEBUG nova.scheduler.client.report [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 701.680437] env[61852]: INFO nova.compute.manager [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] [instance: f9e90a57-da19-4b1a-81cb-8a6433e09785] Took 1.03 seconds to deallocate network for instance. [ 701.790112] env[61852]: DEBUG nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 701.986984] env[61852]: DEBUG oslo_concurrency.lockutils [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.402s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 701.987604] env[61852]: DEBUG nova.compute.manager [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Start building networks asynchronously for instance.
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 701.991247] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.741s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 702.313377] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 702.497492] env[61852]: DEBUG nova.compute.utils [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 702.508015] env[61852]: DEBUG nova.compute.manager [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 702.508015] env[61852]: DEBUG nova.network.neutron [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 702.568040] env[61852]: DEBUG nova.policy [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'be922c40dddf48c8ae436d0a244e7b6b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bdac3605118e44a69d44ab56cafe2e21', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 702.721359] env[61852]: INFO nova.scheduler.client.report [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Deleted allocations for instance f9e90a57-da19-4b1a-81cb-8a6433e09785 [ 702.870021] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff8fc06c-a220-4039-bd6b-8ab74cae1bd3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.874279] env[61852]: DEBUG nova.network.neutron [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Successfully created port: 
4e8a73cc-718d-48f5-b710-deb25af08562 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 702.880751] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc4f9323-c600-4c6d-9ef5-cfc1d18b71d1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.918734] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-803c3ee5-84ff-4529-b3a8-4ee77e046969 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.925189] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32aa7b6a-5fbd-4ef1-bd4a-632cf4ebe66b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.938707] env[61852]: DEBUG nova.compute.provider_tree [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 703.007549] env[61852]: DEBUG nova.compute.manager [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 703.234263] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1edc7b5f-052d-46fd-b783-fc1514bb0f6f tempest-ServersAdminNegativeTestJSON-887081051 tempest-ServersAdminNegativeTestJSON-887081051-project-member] Lock "f9e90a57-da19-4b1a-81cb-8a6433e09785" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 135.863s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 703.442162] env[61852]: DEBUG nova.scheduler.client.report [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 703.573733] env[61852]: DEBUG nova.compute.manager [req-12dc6883-e6cc-4bed-b138-1f2344b6e9fb req-57e7dbee-d9de-49b3-9f83-39c2dbe564f4 service nova] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Received event network-changed-4e8a73cc-718d-48f5-b710-deb25af08562 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 703.573733] env[61852]: DEBUG nova.compute.manager [req-12dc6883-e6cc-4bed-b138-1f2344b6e9fb req-57e7dbee-d9de-49b3-9f83-39c2dbe564f4 service nova] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Refreshing instance network info cache due to event
network-changed-4e8a73cc-718d-48f5-b710-deb25af08562. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 703.573733] env[61852]: DEBUG oslo_concurrency.lockutils [req-12dc6883-e6cc-4bed-b138-1f2344b6e9fb req-57e7dbee-d9de-49b3-9f83-39c2dbe564f4 service nova] Acquiring lock "refresh_cache-290aca37-d0d7-4c8c-b8cf-8b787bbf95c9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 703.573733] env[61852]: DEBUG oslo_concurrency.lockutils [req-12dc6883-e6cc-4bed-b138-1f2344b6e9fb req-57e7dbee-d9de-49b3-9f83-39c2dbe564f4 service nova] Acquired lock "refresh_cache-290aca37-d0d7-4c8c-b8cf-8b787bbf95c9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.573733] env[61852]: DEBUG nova.network.neutron [req-12dc6883-e6cc-4bed-b138-1f2344b6e9fb req-57e7dbee-d9de-49b3-9f83-39c2dbe564f4 service nova] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Refreshing network info cache for port 4e8a73cc-718d-48f5-b710-deb25af08562 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 703.738838] env[61852]: DEBUG nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 703.874917] env[61852]: ERROR nova.compute.manager [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4e8a73cc-718d-48f5-b710-deb25af08562, please check neutron logs for more information. 
[ 703.874917] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 703.874917] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 703.874917] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 703.874917] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 703.874917] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 703.874917] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 703.874917] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 703.874917] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 703.874917] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 703.874917] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 703.874917] env[61852]: ERROR nova.compute.manager raise self.value [ 703.874917] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 703.874917] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 703.874917] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 703.874917] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 703.875594] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 703.875594] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 703.875594] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4e8a73cc-718d-48f5-b710-deb25af08562, please check neutron logs for more information. 
[ 703.875594] env[61852]: ERROR nova.compute.manager [ 703.875594] env[61852]: Traceback (most recent call last): [ 703.875594] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 703.875594] env[61852]: listener.cb(fileno) [ 703.875594] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 703.875594] env[61852]: result = function(*args, **kwargs) [ 703.875594] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 703.875594] env[61852]: return func(*args, **kwargs) [ 703.875594] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 703.875594] env[61852]: raise e [ 703.875594] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 703.875594] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 703.875594] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 703.875594] env[61852]: created_port_ids = self._update_ports_for_instance( [ 703.875594] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 703.875594] env[61852]: with excutils.save_and_reraise_exception(): [ 703.875594] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 703.875594] env[61852]: self.force_reraise() [ 703.875594] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 703.875594] env[61852]: raise self.value [ 703.875594] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 703.875594] env[61852]: updated_port = self._update_port( [ 703.875594] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 703.875594] env[61852]: _ensure_no_port_binding_failure(port) [ 703.875594] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 703.875594] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 703.876396] env[61852]: nova.exception.PortBindingFailed: Binding failed for port 4e8a73cc-718d-48f5-b710-deb25af08562, please check neutron logs for more information. [ 703.876396] env[61852]: Removing descriptor: 19 [ 703.947642] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.956s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 703.948258] env[61852]: ERROR nova.compute.manager [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e9229b79-d2a3-4cfb-967e-c21509bb241e, please check neutron logs for more information. 
[ 703.948258] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Traceback (most recent call last): [ 703.948258] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 703.948258] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] self.driver.spawn(context, instance, image_meta, [ 703.948258] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 703.948258] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] self._vmops.spawn(context, instance, image_meta, injected_files, [ 703.948258] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 703.948258] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] vm_ref = self.build_virtual_machine(instance, [ 703.948258] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 703.948258] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] vif_infos = vmwarevif.get_vif_info(self._session, [ 703.948258] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 703.948640] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] for vif in network_info: [ 703.948640] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 703.948640] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] return self._sync_wrapper(fn, *args, **kwargs) [ 703.948640] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 703.948640] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] self.wait() [ 703.948640] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 703.948640] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] self[:] = self._gt.wait() [ 703.948640] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 703.948640] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] return self._exit_event.wait() [ 703.948640] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 703.948640] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] current.throw(*self._exc) [ 703.948640] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
703.948640] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] result = function(*args, **kwargs) [ 703.949127] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 703.949127] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] return func(*args, **kwargs) [ 703.949127] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 703.949127] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] raise e [ 703.949127] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 703.949127] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] nwinfo = self.network_api.allocate_for_instance( [ 703.949127] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 703.949127] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] created_port_ids = self._update_ports_for_instance( [ 703.949127] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 703.949127] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] with excutils.save_and_reraise_exception(): [ 703.949127] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 703.949127] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] self.force_reraise() [ 703.949127] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 703.950349] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] raise self.value [ 703.950349] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 703.950349] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] updated_port = self._update_port( [ 703.950349] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 703.950349] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] _ensure_no_port_binding_failure(port) [ 703.950349] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 703.950349] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] raise exception.PortBindingFailed(port_id=port['id']) [ 703.950349] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] nova.exception.PortBindingFailed: Binding failed for 
port e9229b79-d2a3-4cfb-967e-c21509bb241e, please check neutron logs for more information. [ 703.950349] env[61852]: ERROR nova.compute.manager [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] [ 703.950349] env[61852]: DEBUG nova.compute.utils [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Binding failed for port e9229b79-d2a3-4cfb-967e-c21509bb241e, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 703.950614] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.819s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 703.952048] env[61852]: INFO nova.compute.claims [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 703.956395] env[61852]: DEBUG nova.compute.manager [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Build of instance 593106da-0c81-448a-b3ba-fd6007dcdd98 was re-scheduled: Binding failed for port e9229b79-d2a3-4cfb-967e-c21509bb241e, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 703.956859] env[61852]: DEBUG nova.compute.manager [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 703.957098] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Acquiring lock "refresh_cache-593106da-0c81-448a-b3ba-fd6007dcdd98" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 703.957249] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Acquired lock "refresh_cache-593106da-0c81-448a-b3ba-fd6007dcdd98" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.957407] env[61852]: DEBUG nova.network.neutron [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 703.967571] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 703.967773] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 703.968289] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Starting heal instance info cache {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 704.018129] env[61852]: DEBUG nova.compute.manager [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 704.051467] env[61852]: DEBUG nova.virt.hardware [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 704.051766] env[61852]: DEBUG nova.virt.hardware [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 704.051856] env[61852]: DEBUG nova.virt.hardware [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 704.052048] env[61852]: DEBUG nova.virt.hardware [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 704.052209] env[61852]: DEBUG nova.virt.hardware [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 704.052375] env[61852]: DEBUG nova.virt.hardware [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 704.052658] env[61852]: DEBUG nova.virt.hardware [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 704.052793] env[61852]: DEBUG nova.virt.hardware [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 704.052942] env[61852]: DEBUG nova.virt.hardware [None 
req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 704.053178] env[61852]: DEBUG nova.virt.hardware [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 704.053364] env[61852]: DEBUG nova.virt.hardware [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 704.054312] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4b2e937-acc0-4db2-9a15-700bfef3ebee {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.063089] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58bbd17c-126a-4727-9fad-2734c0d6474e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.079721] env[61852]: ERROR nova.compute.manager [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4e8a73cc-718d-48f5-b710-deb25af08562, please check neutron logs for more information. 
[ 704.079721] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Traceback (most recent call last): [ 704.079721] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 704.079721] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] yield resources [ 704.079721] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 704.079721] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] self.driver.spawn(context, instance, image_meta, [ 704.079721] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 704.079721] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 704.079721] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 704.079721] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] vm_ref = self.build_virtual_machine(instance, [ 704.079721] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 704.080246] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] vif_infos = vmwarevif.get_vif_info(self._session, [ 704.080246] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 704.080246] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] for vif in network_info: [ 704.080246] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 704.080246] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] return self._sync_wrapper(fn, *args, **kwargs) [ 704.080246] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 704.080246] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] self.wait() [ 704.080246] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 704.080246] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] self[:] = self._gt.wait() [ 704.080246] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 704.080246] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] return self._exit_event.wait() [ 704.080246] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 704.080246] env[61852]: ERROR 
nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] current.throw(*self._exc) [ 704.080797] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 704.080797] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] result = function(*args, **kwargs) [ 704.080797] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 704.080797] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] return func(*args, **kwargs) [ 704.080797] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 704.080797] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] raise e [ 704.080797] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 704.080797] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] nwinfo = self.network_api.allocate_for_instance( [ 704.080797] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 704.080797] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] created_port_ids = self._update_ports_for_instance( [ 704.080797] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 704.080797] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] with excutils.save_and_reraise_exception(): [ 704.080797] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 704.081340] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] self.force_reraise() [ 704.081340] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 704.081340] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] raise self.value [ 704.081340] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 704.081340] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] updated_port = self._update_port( [ 704.081340] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 704.081340] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] _ensure_no_port_binding_failure(port) [ 704.081340] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
704.081340] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] raise exception.PortBindingFailed(port_id=port['id']) [ 704.081340] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] nova.exception.PortBindingFailed: Binding failed for port 4e8a73cc-718d-48f5-b710-deb25af08562, please check neutron logs for more information. [ 704.081340] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] [ 704.081340] env[61852]: INFO nova.compute.manager [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Terminating instance [ 704.082219] env[61852]: DEBUG oslo_concurrency.lockutils [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "refresh_cache-290aca37-d0d7-4c8c-b8cf-8b787bbf95c9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 704.095679] env[61852]: DEBUG nova.network.neutron [req-12dc6883-e6cc-4bed-b138-1f2344b6e9fb req-57e7dbee-d9de-49b3-9f83-39c2dbe564f4 service nova] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 704.171079] env[61852]: DEBUG nova.network.neutron [req-12dc6883-e6cc-4bed-b138-1f2344b6e9fb req-57e7dbee-d9de-49b3-9f83-39c2dbe564f4 service nova] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.267589] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 704.474806] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Didn't find any instances for network info cache update. 
{{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 704.475024] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 704.475191] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 704.475335] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 704.475483] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 704.475659] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 704.475822] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 704.475949] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61852) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 704.476104] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 704.481426] env[61852]: DEBUG nova.network.neutron [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 704.636241] env[61852]: DEBUG nova.network.neutron [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.673344] env[61852]: DEBUG oslo_concurrency.lockutils [req-12dc6883-e6cc-4bed-b138-1f2344b6e9fb req-57e7dbee-d9de-49b3-9f83-39c2dbe564f4 service nova] Releasing lock "refresh_cache-290aca37-d0d7-4c8c-b8cf-8b787bbf95c9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 704.674066] env[61852]: DEBUG oslo_concurrency.lockutils [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquired lock "refresh_cache-290aca37-d0d7-4c8c-b8cf-8b787bbf95c9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.674066] env[61852]: DEBUG nova.network.neutron [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 704.978674] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 705.139387] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Releasing lock "refresh_cache-593106da-0c81-448a-b3ba-fd6007dcdd98" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 705.139620] env[61852]: DEBUG nova.compute.manager [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 705.139798] env[61852]: DEBUG nova.compute.manager [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 705.139968] env[61852]: DEBUG nova.network.neutron [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 705.167466] env[61852]: DEBUG nova.network.neutron [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 705.192771] env[61852]: DEBUG nova.network.neutron [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 705.282838] env[61852]: DEBUG nova.network.neutron [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.316134] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b0d83b6-f039-45bf-bfb8-572ee1215c06 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.325226] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b895a4b-0cdc-40fd-8445-33013413356d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.354375] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb133737-d352-4204-910f-68164f605949 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.361451] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcaf9bf5-84a9-4f6a-aaab-41a7bf0e0d8d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.377604] env[61852]: DEBUG nova.compute.provider_tree [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 705.597768] env[61852]: DEBUG nova.compute.manager [req-76b960b3-7bab-4b3d-b28c-46a8a0a25d44 req-c83c43ee-b6b8-4a98-bef1-8ea9b68f220a 
service nova] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Received event network-vif-deleted-4e8a73cc-718d-48f5-b710-deb25af08562 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 705.670285] env[61852]: DEBUG nova.network.neutron [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.784807] env[61852]: DEBUG oslo_concurrency.lockutils [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Releasing lock "refresh_cache-290aca37-d0d7-4c8c-b8cf-8b787bbf95c9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 705.785280] env[61852]: DEBUG nova.compute.manager [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 705.785509] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 705.785781] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d65a6141-f504-49d7-a66f-a4e977365aef {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.799968] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22785385-2c60-46e0-b488-80e354f547f7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.824190] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9 could not be found. [ 705.824419] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 705.824597] env[61852]: INFO nova.compute.manager [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Took 0.04 seconds to destroy the instance on the hypervisor. 
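The destroy sequence just above (SearchIndex.FindAllByUuid finding nothing, the InstanceNotFound WARNING, then "Instance destroyed" after 0.04 seconds) shows the driver treating an already-missing backend VM as a successful teardown, so that network and claim cleanup still run. A minimal sketch of that tolerate-absence pattern; find_vm_ref and destroy_vm are hypothetical stand-ins, not Nova's actual helpers:

import logging

LOG = logging.getLogger(__name__)

class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""

def find_vm_ref(session, instance_uuid):
    # Hypothetical lookup corresponding to the SearchIndex.FindAllByUuid
    # call in the log; here it always reports the VM as missing.
    raise InstanceNotFound(f"Instance {instance_uuid} could not be found.")

def destroy_vm(session, vm_ref):
    # Hypothetical stand-in for unregistering/deleting the backend VM.
    pass

def destroy_instance(session, instance_uuid):
    try:
        destroy_vm(session, find_vm_ref(session, instance_uuid))
    except InstanceNotFound as exc:
        # Already gone on the backend: warn and fall through, so the
        # caller still deallocates networking and aborts the claim.
        LOG.warning("Instance does not exist on backend: %s", exc)
    LOG.debug("Instance destroyed")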
[ 705.825014] env[61852]: DEBUG oslo.service.loopingcall [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 705.825162] env[61852]: DEBUG nova.compute.manager [-] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 705.825252] env[61852]: DEBUG nova.network.neutron [-] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 705.842144] env[61852]: DEBUG nova.network.neutron [-] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 705.881235] env[61852]: DEBUG nova.scheduler.client.report [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 706.174058] env[61852]: INFO nova.compute.manager [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] [instance: 593106da-0c81-448a-b3ba-fd6007dcdd98] Took 1.03 seconds to deallocate network for instance. [ 706.345691] env[61852]: DEBUG nova.network.neutron [-] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.391718] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.441s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 706.392276] env[61852]: DEBUG nova.compute.manager [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Start building networks asynchronously for instance. 
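The oslo.service.loopingcall "Waiting for function ... to return" line above corresponds to a looping call that re-invokes a function until it raises LoopingCallDone. A minimal sketch of that mechanism using oslo.service's fixed-interval variant (the retry/backoff policy Nova wraps around _deallocate_network_with_retries is more involved; this only shows the core API):

from oslo_service import loopingcall

state = {"attempts": 0}

def _deallocate_with_retries():
    # Hypothetical retried operation: keep looping until it succeeds,
    # then raise LoopingCallDone to stop and hand back a return value.
    state["attempts"] += 1
    if state["attempts"] < 3:
        return  # not done yet; the looping call invokes us again
    raise loopingcall.LoopingCallDone(retvalue=True)

timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
result = timer.start(interval=0.5).wait()  # blocks, like "Waiting for function"
print(result)  # True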
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 706.398207] env[61852]: DEBUG oslo_concurrency.lockutils [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.393s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 706.400822] env[61852]: INFO nova.compute.claims [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 706.848634] env[61852]: INFO nova.compute.manager [-] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Took 1.02 seconds to deallocate network for instance. [ 706.851448] env[61852]: DEBUG nova.compute.claims [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 706.851727] env[61852]: DEBUG oslo_concurrency.lockutils [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 706.899701] env[61852]: DEBUG nova.compute.utils [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 706.901452] env[61852]: DEBUG nova.compute.manager [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Not allocating networking since 'none' was specified. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 707.217706] env[61852]: INFO nova.scheduler.client.report [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Deleted allocations for instance 593106da-0c81-448a-b3ba-fd6007dcdd98 [ 707.402872] env[61852]: DEBUG nova.compute.manager [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Start building block device mappings for instance. 
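Every Acquiring/Acquired/Releasing line in this log, including the waited/held timings (e.g. "compute_resources" acquired after waiting 15.393s above), comes from oslo.concurrency's lockutils. A minimal sketch of the two usual usage forms, with lock names borrowed from the log and a hypothetical critical section:

from oslo_concurrency import lockutils

def refresh_network_info_cache():
    pass  # hypothetical critical section

# Context-manager form, as used for the per-instance cache locks:
with lockutils.lock('refresh_cache-290aca37-d0d7-4c8c-b8cf-8b787bbf95c9'):
    refresh_network_info_cache()

# Decorator form, serializing all claims on one semaphore, which is why
# instance_claim can report a long ":: waited" time under load:
@lockutils.synchronized('compute_resources')
def instance_claim():
    pass  # claim CPU/RAM/disk while holding the lock

instance_claim()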
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 707.725769] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0410f843-39a1-4d6c-af63-031454dcc90a tempest-AttachInterfacesV270Test-861015410 tempest-AttachInterfacesV270Test-861015410-project-member] Lock "593106da-0c81-448a-b3ba-fd6007dcdd98" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 137.192s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 707.753995] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ecf6ab0-78f6-470a-945c-88be503ad142 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.762417] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a2cc23d-7bce-444e-9d1c-6b52b4258af3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.798437] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c111990-b720-4cb0-b307-ecc27af8450a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.806529] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a9218e-ba33-49a4-850a-6d0fdd01625a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.822234] env[61852]: DEBUG nova.compute.provider_tree [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 708.232726] env[61852]: DEBUG nova.compute.manager [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 708.325819] env[61852]: DEBUG nova.scheduler.client.report [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 708.413603] env[61852]: DEBUG nova.compute.manager [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Start spawning the instance on the hypervisor. 
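The report-client entries above carry the compute node's full placement inventory. Each resource class uses the same six-field dict, and "Inventory has not changed" is simply the result of comparing the cached payload against the freshly computed one. A sketch of the payload shape (values copied from the log; the comparison itself is illustrative, not the report client's exact code):

inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139,
                'step_size': 1, 'allocation_ratio': 1.0},
}

def inventory_changed(cached, fresh):
    # A plain dict comparison suffices: placement only needs an update
    # when any field of any resource class differs.
    return cached != fresh

if not inventory_changed(inventory, dict(inventory)):
    print("Inventory has not changed for provider "
          "f818062c-7b17-4bd0-94af-192a674543c3")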
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 708.454448] env[61852]: DEBUG nova.virt.hardware [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 708.454448] env[61852]: DEBUG nova.virt.hardware [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 708.454448] env[61852]: DEBUG nova.virt.hardware [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 708.454448] env[61852]: DEBUG nova.virt.hardware [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 708.454846] env[61852]: DEBUG nova.virt.hardware [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 708.454846] env[61852]: DEBUG nova.virt.hardware [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 708.455175] env[61852]: DEBUG nova.virt.hardware [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 708.455546] env[61852]: DEBUG nova.virt.hardware [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 708.455760] env[61852]: DEBUG nova.virt.hardware [None req-aa083c3a-a273-4035-be52-ad27994d35e8 
tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 708.455934] env[61852]: DEBUG nova.virt.hardware [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 708.456138] env[61852]: DEBUG nova.virt.hardware [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 708.457082] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb78bbc2-9c2d-4eda-a3f3-53aecd121e81 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.466429] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Acquiring lock "b0d38886-aacb-4b7e-9530-c5891d9cee66" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 708.466429] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Lock "b0d38886-aacb-4b7e-9530-c5891d9cee66" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 708.477022] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6d37db0-97ee-4146-afad-1e1d6c03cef6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.495039] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Instance VIF info [] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 708.498594] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Creating folder: Project (babef05153d44ad8980430d40e0e8bbc). Parent ref: group-v277280. 
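The nova.virt.hardware block above is deterministic for this flavor: limits of 0:0:0 mean "unconstrained" and are normalized to a 65536 ceiling per dimension, after which every sockets x cores x threads factorization of the vCPU count is enumerated, so 1 vCPU yields exactly one topology, (1,1,1). A simplified sketch of that enumeration (illustrative, not Nova's exact implementation):

from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')
UNLIMITED = 65536  # a limit of 0 in the log means "no preference/limit"

def possible_topologies(vcpus, max_sockets=UNLIMITED, max_cores=UNLIMITED,
                        max_threads=UNLIMITED):
    # Enumerate every factorization sockets * cores * threads == vcpus
    # that respects the per-dimension ceilings.
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    yield VirtCPUTopology(s, c, t)

print(list(possible_topologies(1)))  # one topology: sockets=1, cores=1, threads=1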
{{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 708.498937] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-60d72ccc-d7c0-4ccc-8f93-d15e1eebcab4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.510475] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Created folder: Project (babef05153d44ad8980430d40e0e8bbc) in parent group-v277280. [ 708.510667] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Creating folder: Instances. Parent ref: group-v277293. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 708.510899] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c985c861-6bb7-4fe9-915b-f0e3cc44f999 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.518812] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Created folder: Instances in parent group-v277293. [ 708.519051] env[61852]: DEBUG oslo.service.loopingcall [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 708.519247] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 708.519448] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-85d98743-d1ed-43c6-b1d2-57482d9e9a64 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.535161] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 708.535161] env[61852]: value = "task-1292707" [ 708.535161] env[61852]: _type = "Task" [ 708.535161] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.543526] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292707, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.765459] env[61852]: DEBUG oslo_concurrency.lockutils [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 708.831600] env[61852]: DEBUG oslo_concurrency.lockutils [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.433s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 708.832203] env[61852]: DEBUG nova.compute.manager [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 708.835063] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.919s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 708.836487] env[61852]: INFO nova.compute.claims [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 709.050086] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292707, 'name': CreateVM_Task, 'duration_secs': 0.282158} completed successfully. 
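task-1292707 above is a vSphere task handle: the *_Task SOAP call returns immediately, and oslo.vmware then polls TaskInfo, which produces the "progress is 0%" and "completed successfully" lines. A minimal sketch against oslo.vmware's session API; the connection parameters and managed-object references are placeholders, and the exact keyword arguments to CreateVM_Task are an assumption:

from oslo_vmware import api

session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

folder_ref = config_spec = pool_ref = None  # placeholders for real refs/spec

# Returns a Task managed-object reference (e.g. "task-1292707") at once.
task_ref = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                              config=config_spec, pool=pool_ref)

# Polls the task every task_poll_interval seconds until vSphere reports
# "success" (the result is returned) or "error" (an exception is raised).
task_info = session.wait_for_task(task_ref)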
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.050248] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 709.050676] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.050867] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.051246] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 709.051542] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edec696c-3aec-4b5a-90d9-2dede6d32198 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.056564] env[61852]: DEBUG oslo_vmware.api [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Waiting for the task: (returnval){ [ 709.056564] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52193d5d-360b-d149-edf2-a1a401eb782b" [ 709.056564] env[61852]: _type = "Task" [ 709.056564] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.064858] env[61852]: DEBUG oslo_vmware.api [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52193d5d-360b-d149-edf2-a1a401eb782b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.343443] env[61852]: DEBUG nova.compute.utils [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 709.346852] env[61852]: DEBUG nova.compute.manager [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 709.347038] env[61852]: DEBUG nova.network.neutron [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 709.429049] env[61852]: DEBUG nova.policy [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '21b7672068df4da0973e855e18691592', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3e7a1a39ca3a443495bdba82971360a1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 709.574032] env[61852]: DEBUG oslo_vmware.api [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52193d5d-360b-d149-edf2-a1a401eb782b, 'name': SearchDatastore_Task, 'duration_secs': 0.032235} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.574651] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 709.574827] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 709.575329] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.575646] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.576223] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 709.576499] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2acbd410-49db-47f9-b5a0-46bec68ada31 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.589489] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 709.589489] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 709.590256] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bbaa241-2aa3-4a06-a5d4-614ca8d5b05d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.595920] env[61852]: DEBUG oslo_vmware.api [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Waiting for the task: (returnval){ [ 709.595920] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52e42217-6495-39ae-62d6-86f029ad10b8" [ 709.595920] env[61852]: _type = "Task" [ 709.595920] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.604507] env[61852]: DEBUG oslo_vmware.api [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52e42217-6495-39ae-62d6-86f029ad10b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.848088] env[61852]: DEBUG nova.compute.manager [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 709.972983] env[61852]: DEBUG nova.network.neutron [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Successfully created port: 0e4e011d-e562-4d8b-ae0d-b0da4dbe80de {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 710.106859] env[61852]: DEBUG oslo_vmware.api [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52e42217-6495-39ae-62d6-86f029ad10b8, 'name': SearchDatastore_Task, 'duration_secs': 0.012867} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.107665] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c31eb44-3987-40fc-a749-d6c8cfb9a03b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.115852] env[61852]: DEBUG oslo_vmware.api [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Waiting for the task: (returnval){ [ 710.115852] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5236e234-bfe9-f12f-cf16-ecbd4cbe840c" [ 710.115852] env[61852]: _type = "Task" [ 710.115852] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.124197] env[61852]: DEBUG oslo_vmware.api [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5236e234-bfe9-f12f-cf16-ecbd4cbe840c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.305653] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-634b4b98-7087-4dfc-be32-fd026c20373b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.315979] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b815d230-a68a-4a8e-a4ff-558b11840e29 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.357321] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-762fe438-48d8-4e6e-b810-f745681efca9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.365928] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0da82c54-6bfc-4ea9-8be5-d172eb43adf5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.381329] env[61852]: DEBUG nova.compute.provider_tree [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 710.628539] env[61852]: DEBUG oslo_vmware.api [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5236e234-bfe9-f12f-cf16-ecbd4cbe840c, 'name': SearchDatastore_Task, 'duration_secs': 0.009643} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.628793] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.629061] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 694889e8-200e-454c-9e87-60521dd044d9/694889e8-200e-454c-9e87-60521dd044d9.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 710.629404] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7983b29d-019d-4cae-b5c5-8beb5a86a5b7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.637240] env[61852]: DEBUG oslo_vmware.api [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Waiting for the task: (returnval){ [ 710.637240] env[61852]: value = "task-1292708" [ 710.637240] env[61852]: _type = "Task" [ 710.637240] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.645916] env[61852]: DEBUG oslo_vmware.api [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292708, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.874351] env[61852]: DEBUG nova.compute.manager [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Start spawning the instance on the hypervisor. 
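The SearchDatastore_Task / CopyVirtualDisk_Task pair above is the image-cache fast path: the cached VMDK for image 90fd8f39-... already sits under devstack-image-cache_base, so spawning only needs a datastore-local disk copy instead of a fresh download from Glance. A condensed sketch of that decision; all three helpers are hypothetical wrappers over the tasks named in the log:

def datastore_file_exists(session, path):
    return True  # hypothetical wrapper over SearchDatastore_Task

def fetch_image_to_cache(session, image_id, path):
    pass  # hypothetical download from Glance into the cache directory

def copy_virtual_disk(session, src, dst):
    pass  # hypothetical wrapper over CopyVirtualDisk_Task

def ensure_root_disk(session, ds, image_id, instance_uuid):
    cache = f"[{ds}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
    dest = f"[{ds}] {instance_uuid}/{instance_uuid}.vmdk"
    if not datastore_file_exists(session, cache):
        fetch_image_to_cache(session, image_id, cache)  # cache miss
    copy_virtual_disk(session, cache, dest)  # cache hit: local copy only
    return dest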
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 710.888759] env[61852]: DEBUG nova.scheduler.client.report [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 710.897975] env[61852]: ERROR nova.compute.manager [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 0e4e011d-e562-4d8b-ae0d-b0da4dbe80de, please check neutron logs for more information. [ 710.897975] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 710.897975] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 710.897975] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 710.897975] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 710.897975] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 710.897975] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 710.897975] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 710.897975] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 710.897975] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 710.897975] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 710.897975] env[61852]: ERROR nova.compute.manager raise self.value [ 710.897975] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 710.897975] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 710.897975] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 710.897975] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 710.898486] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 710.898486] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 710.898486] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 0e4e011d-e562-4d8b-ae0d-b0da4dbe80de, please check neutron logs for more information. 
[ 710.898486] env[61852]: ERROR nova.compute.manager [ 710.898486] env[61852]: Traceback (most recent call last): [ 710.898486] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 710.898486] env[61852]: listener.cb(fileno) [ 710.898486] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 710.898486] env[61852]: result = function(*args, **kwargs) [ 710.898486] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 710.898486] env[61852]: return func(*args, **kwargs) [ 710.898486] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 710.898486] env[61852]: raise e [ 710.898486] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 710.898486] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 710.898486] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 710.898486] env[61852]: created_port_ids = self._update_ports_for_instance( [ 710.898486] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 710.898486] env[61852]: with excutils.save_and_reraise_exception(): [ 710.898486] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 710.898486] env[61852]: self.force_reraise() [ 710.898486] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 710.898486] env[61852]: raise self.value [ 710.898486] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 710.898486] env[61852]: updated_port = self._update_port( [ 710.898486] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 710.898486] env[61852]: _ensure_no_port_binding_failure(port) [ 710.898486] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 710.898486] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 710.899350] env[61852]: nova.exception.PortBindingFailed: Binding failed for port 0e4e011d-e562-4d8b-ae0d-b0da4dbe80de, please check neutron logs for more information. 
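Both copies of the traceback end in _ensure_no_port_binding_failure, which converts a Neutron port whose binding never completed into PortBindingFailed and thereby fails the build. The check boils down to inspecting the port's binding:vif_type; a minimal sketch reconstructed from the frames above (simplified, not Nova's verbatim code):

VIF_TYPE_BINDING_FAILED = 'binding_failed'

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, please "
                         "check neutron logs for more information.")

def ensure_no_port_binding_failure(port):
    # Neutron marks a failed binding by setting binding:vif_type to
    # 'binding_failed'; Nova turns that into a hard spawn failure.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

try:
    ensure_no_port_binding_failure({
        'id': '0e4e011d-e562-4d8b-ae0d-b0da4dbe80de',
        'binding:vif_type': 'binding_failed',
    })
except PortBindingFailed as exc:
    print(exc)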
[ 710.899350] env[61852]: Removing descriptor: 19 [ 710.917295] env[61852]: DEBUG nova.virt.hardware [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 710.917593] env[61852]: DEBUG nova.virt.hardware [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 710.917773] env[61852]: DEBUG nova.virt.hardware [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 710.917991] env[61852]: DEBUG nova.virt.hardware [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 710.918164] env[61852]: DEBUG nova.virt.hardware [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 710.918312] env[61852]: DEBUG nova.virt.hardware [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 710.918554] env[61852]: DEBUG nova.virt.hardware [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 710.918721] env[61852]: DEBUG nova.virt.hardware [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 710.918890] env[61852]: DEBUG nova.virt.hardware [None 
req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 710.919066] env[61852]: DEBUG nova.virt.hardware [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 710.919241] env[61852]: DEBUG nova.virt.hardware [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 710.920371] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ba1b372-ddc0-4bfa-8803-0e70c73de028 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.932497] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e10eb02-68aa-425e-9d1e-1b6485aff71f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.952413] env[61852]: ERROR nova.compute.manager [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 0e4e011d-e562-4d8b-ae0d-b0da4dbe80de, please check neutron logs for more information. 
[ 710.952413] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Traceback (most recent call last): [ 710.952413] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 710.952413] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] yield resources [ 710.952413] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 710.952413] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] self.driver.spawn(context, instance, image_meta, [ 710.952413] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 710.952413] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 710.952413] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 710.952413] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] vm_ref = self.build_virtual_machine(instance, [ 710.952413] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 710.952853] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] vif_infos = vmwarevif.get_vif_info(self._session, [ 710.952853] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 710.952853] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] for vif in network_info: [ 710.952853] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 710.952853] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] return self._sync_wrapper(fn, *args, **kwargs) [ 710.952853] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 710.952853] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] self.wait() [ 710.952853] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 710.952853] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] self[:] = self._gt.wait() [ 710.952853] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 710.952853] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] return self._exit_event.wait() [ 710.952853] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 710.952853] env[61852]: ERROR 
nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] current.throw(*self._exc) [ 710.953261] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 710.953261] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] result = function(*args, **kwargs) [ 710.953261] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 710.953261] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] return func(*args, **kwargs) [ 710.953261] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 710.953261] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] raise e [ 710.953261] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 710.953261] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] nwinfo = self.network_api.allocate_for_instance( [ 710.953261] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 710.953261] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] created_port_ids = self._update_ports_for_instance( [ 710.953261] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 710.953261] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] with excutils.save_and_reraise_exception(): [ 710.953261] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 710.953594] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] self.force_reraise() [ 710.953594] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 710.953594] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] raise self.value [ 710.953594] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 710.953594] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] updated_port = self._update_port( [ 710.953594] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 710.953594] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] _ensure_no_port_binding_failure(port) [ 710.953594] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
710.953594] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] raise exception.PortBindingFailed(port_id=port['id']) [ 710.953594] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] nova.exception.PortBindingFailed: Binding failed for port 0e4e011d-e562-4d8b-ae0d-b0da4dbe80de, please check neutron logs for more information. [ 710.953594] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] [ 710.953594] env[61852]: INFO nova.compute.manager [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Terminating instance [ 710.955330] env[61852]: DEBUG oslo_concurrency.lockutils [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Acquiring lock "refresh_cache-d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 710.955578] env[61852]: DEBUG oslo_concurrency.lockutils [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Acquired lock "refresh_cache-d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.955789] env[61852]: DEBUG nova.network.neutron [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 711.064334] env[61852]: DEBUG nova.compute.manager [req-08d95919-abef-4938-b94d-d2050d070d92 req-93f98961-ebdf-4f44-8e2a-e7c2237cbf81 service nova] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Received event network-changed-0e4e011d-e562-4d8b-ae0d-b0da4dbe80de {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 711.064442] env[61852]: DEBUG nova.compute.manager [req-08d95919-abef-4938-b94d-d2050d070d92 req-93f98961-ebdf-4f44-8e2a-e7c2237cbf81 service nova] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Refreshing instance network info cache due to event network-changed-0e4e011d-e562-4d8b-ae0d-b0da4dbe80de. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 711.064550] env[61852]: DEBUG oslo_concurrency.lockutils [req-08d95919-abef-4938-b94d-d2050d070d92 req-93f98961-ebdf-4f44-8e2a-e7c2237cbf81 service nova] Acquiring lock "refresh_cache-d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.147243] env[61852]: DEBUG oslo_vmware.api [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292708, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.396472] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.561s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 711.396998] env[61852]: DEBUG nova.compute.manager [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 711.400252] env[61852]: DEBUG oslo_concurrency.lockutils [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.932s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 711.478304] env[61852]: DEBUG nova.network.neutron [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 711.608184] env[61852]: DEBUG nova.network.neutron [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.647429] env[61852]: DEBUG oslo_vmware.api [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292708, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.519132} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.647696] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 694889e8-200e-454c-9e87-60521dd044d9/694889e8-200e-454c-9e87-60521dd044d9.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 711.647926] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 711.648155] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e22fa9b3-9008-4040-99b1-79bef88dbaf6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.655458] env[61852]: DEBUG oslo_vmware.api [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Waiting for the task: (returnval){ [ 711.655458] env[61852]: value = "task-1292709" [ 711.655458] env[61852]: _type = "Task" [ 711.655458] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.663366] env[61852]: DEBUG oslo_vmware.api [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292709, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.905765] env[61852]: DEBUG nova.compute.utils [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 711.914428] env[61852]: DEBUG nova.compute.manager [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 711.914610] env[61852]: DEBUG nova.network.neutron [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 712.027205] env[61852]: DEBUG nova.policy [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd1349b8262e345068742af657fa8cbd0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4dbb543c66364861bf5f437c8c33a550', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 712.116018] env[61852]: DEBUG oslo_concurrency.lockutils [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Releasing lock "refresh_cache-d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 712.116018] env[61852]: DEBUG nova.compute.manager [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 712.116018] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 712.116018] env[61852]: DEBUG oslo_concurrency.lockutils [req-08d95919-abef-4938-b94d-d2050d070d92 req-93f98961-ebdf-4f44-8e2a-e7c2237cbf81 service nova] Acquired lock "refresh_cache-d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.116018] env[61852]: DEBUG nova.network.neutron [req-08d95919-abef-4938-b94d-d2050d070d92 req-93f98961-ebdf-4f44-8e2a-e7c2237cbf81 service nova] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Refreshing network info cache for port 0e4e011d-e562-4d8b-ae0d-b0da4dbe80de {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 712.116429] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fa388827-427c-4388-a48b-177ef2980120 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.129308] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ed8cfa3-9c95-438d-9ed8-12490421e84e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.155125] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb could not be found. [ 712.155354] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 712.155535] env[61852]: INFO nova.compute.manager [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Took 0.04 seconds to destroy the instance on the hypervisor. [ 712.155774] env[61852]: DEBUG oslo.service.loopingcall [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 712.158296] env[61852]: DEBUG nova.compute.manager [-] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 712.158397] env[61852]: DEBUG nova.network.neutron [-] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 712.168145] env[61852]: DEBUG oslo_vmware.api [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292709, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062412} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.168431] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 712.169407] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe8f2d83-b1b0-4ec2-a42e-f3edc3d6b785 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.192855] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Reconfiguring VM instance instance-00000028 to attach disk [datastore1] 694889e8-200e-454c-9e87-60521dd044d9/694889e8-200e-454c-9e87-60521dd044d9.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 712.196883] env[61852]: DEBUG nova.network.neutron [-] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 712.198090] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3161c723-0faa-41c6-aa92-0cce0cd2f2d7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.219404] env[61852]: DEBUG nova.network.neutron [-] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.228485] env[61852]: DEBUG oslo_vmware.api [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Waiting for the task: (returnval){ [ 712.228485] env[61852]: value = "task-1292710" [ 712.228485] env[61852]: _type = "Task" [ 712.228485] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.236015] env[61852]: DEBUG oslo_vmware.api [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292710, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.356930] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d83ff11f-2c46-42cf-b70b-a73d43c509e2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.364702] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-138ca9f4-86f5-42c5-8f41-00fa7f535a11 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.395356] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9393704f-ae7e-4608-a278-164f4c47e9c1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.402991] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee2ea6fa-7892-459f-8dc2-8ea238b98ad6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.416134] env[61852]: DEBUG nova.compute.manager [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 712.418763] env[61852]: DEBUG nova.compute.provider_tree [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 712.637777] env[61852]: DEBUG nova.network.neutron [req-08d95919-abef-4938-b94d-d2050d070d92 req-93f98961-ebdf-4f44-8e2a-e7c2237cbf81 service nova] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 712.722134] env[61852]: INFO nova.compute.manager [-] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Took 0.56 seconds to deallocate network for instance. [ 712.724827] env[61852]: DEBUG nova.compute.claims [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 712.725045] env[61852]: DEBUG oslo_concurrency.lockutils [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.736481] env[61852]: DEBUG oslo_vmware.api [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292710, 'name': ReconfigVM_Task, 'duration_secs': 0.291378} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.736949] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Reconfigured VM instance instance-00000028 to attach disk [datastore1] 694889e8-200e-454c-9e87-60521dd044d9/694889e8-200e-454c-9e87-60521dd044d9.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 712.737365] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6a8fed2f-36db-42b6-b9b5-b02dcfdb25f2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.743570] env[61852]: DEBUG oslo_vmware.api [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Waiting for the task: (returnval){ [ 712.743570] env[61852]: value = "task-1292711" [ 712.743570] env[61852]: _type = "Task" [ 712.743570] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.751276] env[61852]: DEBUG oslo_vmware.api [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292711, 'name': Rename_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.752058] env[61852]: DEBUG nova.network.neutron [req-08d95919-abef-4938-b94d-d2050d070d92 req-93f98961-ebdf-4f44-8e2a-e7c2237cbf81 service nova] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.825997] env[61852]: DEBUG nova.network.neutron [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Successfully created port: 8025adf1-c695-4094-84cc-7d345f318195 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 712.928052] env[61852]: DEBUG nova.scheduler.client.report [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 713.103614] env[61852]: DEBUG nova.compute.manager [req-7cff41a3-1179-419d-ba3c-4c6aa7eeca55 req-0681be38-b11f-4452-8e55-b3cd7aac6867 service nova] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Received event network-vif-deleted-0e4e011d-e562-4d8b-ae0d-b0da4dbe80de {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 713.253594] env[61852]: DEBUG 
oslo_vmware.api [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292711, 'name': Rename_Task, 'duration_secs': 0.127596} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.253874] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 713.254332] env[61852]: DEBUG oslo_concurrency.lockutils [req-08d95919-abef-4938-b94d-d2050d070d92 req-93f98961-ebdf-4f44-8e2a-e7c2237cbf81 service nova] Releasing lock "refresh_cache-d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 713.254651] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6d24069e-e06d-427f-a4e2-d67a471606c9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.260890] env[61852]: DEBUG oslo_vmware.api [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Waiting for the task: (returnval){ [ 713.260890] env[61852]: value = "task-1292712" [ 713.260890] env[61852]: _type = "Task" [ 713.260890] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.268378] env[61852]: DEBUG oslo_vmware.api [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292712, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.433204] env[61852]: DEBUG nova.compute.manager [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 713.435927] env[61852]: DEBUG oslo_concurrency.lockutils [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.036s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 713.436601] env[61852]: ERROR nova.compute.manager [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ed4d0907-7acf-46b5-b753-014f317badbd, please check neutron logs for more information. 
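Both PortBindingFailed tracebacks above bottom out in _ensure_no_port_binding_failure at nova/network/neutron.py:294. A minimal sketch of what that check amounts to, assuming the usual convention that Neutron reports a failed binding through the port's 'binding:vif_type' attribute; the constant and the exception class below are illustrative reconstructions for this note, not the actual Nova definitions:

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed Neutron sentinel for a failed binding

    class PortBindingFailed(Exception):
        """Illustrative stand-in for nova.exception.PortBindingFailed."""
        def __init__(self, port_id):
            super().__init__(
                f'Binding failed for port {port_id}, please check neutron '
                f'logs for more information.')

    def _ensure_no_port_binding_failure(port):
        # If Neutron could not bind the port to a host, the updated port
        # comes back with vif_type 'binding_failed'; failing fast here lets
        # the compute manager abort the resource claim and reschedule.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])
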
[ 713.436601] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] Traceback (most recent call last): [ 713.436601] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 713.436601] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] self.driver.spawn(context, instance, image_meta, [ 713.436601] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 713.436601] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] self._vmops.spawn(context, instance, image_meta, injected_files, [ 713.436601] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 713.436601] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] vm_ref = self.build_virtual_machine(instance, [ 713.436601] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 713.436601] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] vif_infos = vmwarevif.get_vif_info(self._session, [ 713.436601] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 713.437112] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] for vif in network_info: [ 713.437112] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 713.437112] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] return self._sync_wrapper(fn, *args, **kwargs) [ 713.437112] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 713.437112] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] self.wait() [ 713.437112] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 713.437112] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] self[:] = self._gt.wait() [ 713.437112] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 713.437112] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] return self._exit_event.wait() [ 713.437112] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 713.437112] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] result = hub.switch() [ 713.437112] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
713.437112] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] return self.greenlet.switch() [ 713.437625] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 713.437625] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] result = function(*args, **kwargs) [ 713.437625] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 713.437625] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] return func(*args, **kwargs) [ 713.437625] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 713.437625] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] raise e [ 713.437625] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 713.437625] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] nwinfo = self.network_api.allocate_for_instance( [ 713.437625] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 713.437625] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] created_port_ids = self._update_ports_for_instance( [ 713.437625] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 713.437625] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] with excutils.save_and_reraise_exception(): [ 713.437625] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 713.438272] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] self.force_reraise() [ 713.438272] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 713.438272] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] raise self.value [ 713.438272] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 713.438272] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] updated_port = self._update_port( [ 713.438272] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 713.438272] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] _ensure_no_port_binding_failure(port) [ 713.438272] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 713.438272] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] raise exception.PortBindingFailed(port_id=port['id']) [ 713.438272] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] nova.exception.PortBindingFailed: Binding failed for port ed4d0907-7acf-46b5-b753-014f317badbd, please check neutron logs for more information. [ 713.438272] env[61852]: ERROR nova.compute.manager [instance: 48b40da3-1efc-4557-a791-e88158338aec] [ 713.439137] env[61852]: DEBUG nova.compute.utils [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Binding failed for port ed4d0907-7acf-46b5-b753-014f317badbd, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 713.439137] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.658s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 713.444075] env[61852]: DEBUG nova.compute.manager [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Build of instance 48b40da3-1efc-4557-a791-e88158338aec was re-scheduled: Binding failed for port ed4d0907-7acf-46b5-b753-014f317badbd, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 713.444075] env[61852]: DEBUG nova.compute.manager [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 713.444075] env[61852]: DEBUG oslo_concurrency.lockutils [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Acquiring lock "refresh_cache-48b40da3-1efc-4557-a791-e88158338aec" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 713.444075] env[61852]: DEBUG oslo_concurrency.lockutils [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Acquired lock "refresh_cache-48b40da3-1efc-4557-a791-e88158338aec" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.444303] env[61852]: DEBUG nova.network.neutron [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 713.475487] env[61852]: DEBUG nova.virt.hardware [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=<?>,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-15T17:18:24Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 713.475760] env[61852]: DEBUG nova.virt.hardware [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 713.475956] env[61852]: DEBUG nova.virt.hardware [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 713.476199] env[61852]: DEBUG nova.virt.hardware [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 713.476358] env[61852]: DEBUG
nova.virt.hardware [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 713.476543] env[61852]: DEBUG nova.virt.hardware [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 713.476791] env[61852]: DEBUG nova.virt.hardware [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 713.476970] env[61852]: DEBUG nova.virt.hardware [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 713.477293] env[61852]: DEBUG nova.virt.hardware [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 713.477396] env[61852]: DEBUG nova.virt.hardware [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 713.477569] env[61852]: DEBUG nova.virt.hardware [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 713.478533] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-649018df-2649-464d-9dee-fc1feb6e1849 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.487722] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4b12c41-c0a5-42cc-a592-3b36e9fb1cee {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.771410] env[61852]: DEBUG oslo_vmware.api [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292712, 'name': PowerOnVM_Task, 'duration_secs': 0.4414} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.771705] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 713.771913] env[61852]: INFO nova.compute.manager [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Took 5.36 seconds to spawn the instance on the hypervisor. [ 713.772120] env[61852]: DEBUG nova.compute.manager [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 713.772823] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c66108-af2f-41c6-8c34-1da7f3627234 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.966592] env[61852]: DEBUG nova.network.neutron [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 714.063028] env[61852]: DEBUG nova.network.neutron [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.302084] env[61852]: INFO nova.compute.manager [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Took 25.19 seconds to build instance. 
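The Task/wait_for_task entries around task-1292709 through task-1292712 (ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) all follow the same poll loop: oslo.vmware submits the vSphere task, then repeatedly reads its state until it reports success or error. An illustrative sketch of that loop is below; fetch_task_info is a hypothetical stand-in for reading the vSphere TaskInfo object, and the 0.5 s interval is an assumption based on the roughly half-second spacing of the "progress is N%" records:

    import time

    def wait_for_task(fetch_task_info, poll_interval=0.5):
        # fetch_task_info() returns a dict like {'state': 'running', 'progress': 89},
        # mirroring the fields reported in the _poll_task records above.
        while True:
            info = fetch_task_info()
            if info['state'] == 'success':   # "completed successfully" in the log
                return info.get('result')
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            time.sleep(poll_interval)        # queued/running: poll again
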
[ 714.352158] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09f11d72-1a29-4e86-b912-57a752592bd1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.360063] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9ef263b-ef68-4d88-8ef5-b023af0b7479 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.393593] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b8976d-0543-4496-839c-17a9eb71d0bc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.402748] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c92e82ac-e237-4c84-9dd8-b3f66368cf02 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.419405] env[61852]: DEBUG nova.compute.provider_tree [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 714.566594] env[61852]: DEBUG oslo_concurrency.lockutils [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Releasing lock "refresh_cache-48b40da3-1efc-4557-a791-e88158338aec" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 714.566856] env[61852]: DEBUG nova.compute.manager [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 714.567178] env[61852]: DEBUG nova.compute.manager [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 714.567294] env[61852]: DEBUG nova.network.neutron [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 48b40da3-1efc-4557-a791-e88158338aec] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 714.588433] env[61852]: DEBUG nova.network.neutron [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 714.618269] env[61852]: ERROR nova.compute.manager [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8025adf1-c695-4094-84cc-7d345f318195, please check neutron logs for more information. [ 714.618269] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 714.618269] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 714.618269] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 714.618269] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 714.618269] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 714.618269] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 714.618269] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 714.618269] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 714.618269] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 714.618269] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 714.618269] env[61852]: ERROR nova.compute.manager raise self.value [ 714.618269] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 714.618269] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 714.618269] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 714.618269] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 714.618974] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 714.618974] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 714.618974] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8025adf1-c695-4094-84cc-7d345f318195, please check neutron logs for more information. 
[ 714.618974] env[61852]: ERROR nova.compute.manager [ 714.618974] env[61852]: Traceback (most recent call last): [ 714.618974] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 714.618974] env[61852]: listener.cb(fileno) [ 714.618974] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 714.618974] env[61852]: result = function(*args, **kwargs) [ 714.618974] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 714.618974] env[61852]: return func(*args, **kwargs) [ 714.618974] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 714.618974] env[61852]: raise e [ 714.618974] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 714.618974] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 714.618974] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 714.618974] env[61852]: created_port_ids = self._update_ports_for_instance( [ 714.618974] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 714.618974] env[61852]: with excutils.save_and_reraise_exception(): [ 714.618974] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 714.618974] env[61852]: self.force_reraise() [ 714.618974] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 714.618974] env[61852]: raise self.value [ 714.618974] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 714.618974] env[61852]: updated_port = self._update_port( [ 714.618974] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 714.618974] env[61852]: _ensure_no_port_binding_failure(port) [ 714.618974] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 714.618974] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 714.619990] env[61852]: nova.exception.PortBindingFailed: Binding failed for port 8025adf1-c695-4094-84cc-7d345f318195, please check neutron logs for more information. [ 714.619990] env[61852]: Removing descriptor: 19 [ 714.619990] env[61852]: ERROR nova.compute.manager [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8025adf1-c695-4094-84cc-7d345f318195, please check neutron logs for more information. 
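Every traceback in this section passes through excutils.save_and_reraise_exception (the __exit__ / force_reraise / raise self.value frames). The sketch below shows the canonical oslo.utils usage that produces those frames: cleanup runs when an operation fails part-way, and the original exception is preserved and re-raised for the caller. allocate_ports, create_port and delete_port are hypothetical placeholders, not the Nova code:

    from oslo_utils import excutils  # real helper from the oslo.utils package

    def allocate_ports(specs, create_port, delete_port):
        created = []
        try:
            for spec in specs:
                created.append(create_port(spec))
        except Exception:
            with excutils.save_and_reraise_exception():
                # Roll back the ports created before the failure; on exit the
                # context manager re-raises the saved exception, which is the
                # "raise self.value" frame seen in the tracebacks above.
                for port_id in created:
                    delete_port(port_id)
        return created
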
[ 714.619990] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Traceback (most recent call last): [ 714.619990] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 714.619990] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] yield resources [ 714.619990] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 714.619990] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] self.driver.spawn(context, instance, image_meta, [ 714.619990] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 714.619990] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 714.619990] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 714.619990] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] vm_ref = self.build_virtual_machine(instance, [ 714.620329] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 714.620329] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] vif_infos = vmwarevif.get_vif_info(self._session, [ 714.620329] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 714.620329] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] for vif in network_info: [ 714.620329] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 714.620329] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] return self._sync_wrapper(fn, *args, **kwargs) [ 714.620329] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 714.620329] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] self.wait() [ 714.620329] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 714.620329] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] self[:] = self._gt.wait() [ 714.620329] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 714.620329] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] return self._exit_event.wait() [ 714.620329] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 714.620681] env[61852]: ERROR 
nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] result = hub.switch() [ 714.620681] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 714.620681] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] return self.greenlet.switch() [ 714.620681] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 714.620681] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] result = function(*args, **kwargs) [ 714.620681] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 714.620681] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] return func(*args, **kwargs) [ 714.620681] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 714.620681] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] raise e [ 714.620681] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 714.620681] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] nwinfo = self.network_api.allocate_for_instance( [ 714.620681] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 714.620681] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] created_port_ids = self._update_ports_for_instance( [ 714.621065] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 714.621065] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] with excutils.save_and_reraise_exception(): [ 714.621065] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 714.621065] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] self.force_reraise() [ 714.621065] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 714.621065] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] raise self.value [ 714.621065] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 714.621065] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] updated_port = self._update_port( [ 714.621065] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 714.621065] 
env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] _ensure_no_port_binding_failure(port) [ 714.621065] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 714.621065] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] raise exception.PortBindingFailed(port_id=port['id']) [ 714.621401] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] nova.exception.PortBindingFailed: Binding failed for port 8025adf1-c695-4094-84cc-7d345f318195, please check neutron logs for more information. [ 714.621401] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] [ 714.621401] env[61852]: INFO nova.compute.manager [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Terminating instance [ 714.621490] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "refresh_cache-a77ddc8b-f3b2-4e13-944d-5cafecf59fae" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 714.621697] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquired lock "refresh_cache-a77ddc8b-f3b2-4e13-944d-5cafecf59fae" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.621992] env[61852]: DEBUG nova.network.neutron [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 714.805428] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aa083c3a-a273-4035-be52-ad27994d35e8 tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Lock "694889e8-200e-454c-9e87-60521dd044d9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 112.072s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 714.923291] env[61852]: DEBUG nova.scheduler.client.report [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 715.092469] env[61852]: DEBUG nova.network.neutron [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 
tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.140411] env[61852]: DEBUG nova.network.neutron [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 715.201807] env[61852]: INFO nova.compute.manager [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Rebuilding instance [ 715.225688] env[61852]: DEBUG nova.network.neutron [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.248749] env[61852]: DEBUG nova.compute.manager [req-78cdf8b4-b8fc-4c54-b024-3d622a436269 req-a3847934-36a4-48cd-ad47-0517b929e76f service nova] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Received event network-changed-8025adf1-c695-4094-84cc-7d345f318195 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 715.248749] env[61852]: DEBUG nova.compute.manager [req-78cdf8b4-b8fc-4c54-b024-3d622a436269 req-a3847934-36a4-48cd-ad47-0517b929e76f service nova] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Refreshing instance network info cache due to event network-changed-8025adf1-c695-4094-84cc-7d345f318195. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 715.248834] env[61852]: DEBUG oslo_concurrency.lockutils [req-78cdf8b4-b8fc-4c54-b024-3d622a436269 req-a3847934-36a4-48cd-ad47-0517b929e76f service nova] Acquiring lock "refresh_cache-a77ddc8b-f3b2-4e13-944d-5cafecf59fae" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 715.255437] env[61852]: DEBUG nova.compute.manager [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 715.256484] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-612731e2-c145-431e-bff6-b37a828a5c93 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.307235] env[61852]: DEBUG nova.compute.manager [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 715.428302] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.989s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.429142] env[61852]: ERROR nova.compute.manager [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9073c855-90cf-41ad-8ef6-a60909d19c57, please check neutron logs for more information. [ 715.429142] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Traceback (most recent call last): [ 715.429142] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 715.429142] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] self.driver.spawn(context, instance, image_meta, [ 715.429142] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 715.429142] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 715.429142] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 715.429142] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] vm_ref = self.build_virtual_machine(instance, [ 715.429142] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 715.429142] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] vif_infos = vmwarevif.get_vif_info(self._session, [ 715.429142] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 715.429522] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] for vif in network_info: [ 715.429522] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 715.429522] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] return self._sync_wrapper(fn, *args, **kwargs) [ 715.429522] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 715.429522] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] self.wait() [ 715.429522] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 715.429522] env[61852]: ERROR 
nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] self[:] = self._gt.wait() [ 715.429522] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 715.429522] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] return self._exit_event.wait() [ 715.429522] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 715.429522] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] result = hub.switch() [ 715.429522] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 715.429522] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] return self.greenlet.switch() [ 715.429909] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 715.429909] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] result = function(*args, **kwargs) [ 715.429909] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 715.429909] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] return func(*args, **kwargs) [ 715.429909] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 715.429909] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] raise e [ 715.429909] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 715.429909] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] nwinfo = self.network_api.allocate_for_instance( [ 715.429909] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 715.429909] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] created_port_ids = self._update_ports_for_instance( [ 715.429909] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 715.429909] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] with excutils.save_and_reraise_exception(): [ 715.429909] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 715.430312] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] self.force_reraise() [ 715.430312] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 715.430312] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] raise self.value [ 715.430312] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 715.430312] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] updated_port = self._update_port( [ 715.430312] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 715.430312] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] _ensure_no_port_binding_failure(port) [ 715.430312] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 715.430312] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] raise exception.PortBindingFailed(port_id=port['id']) [ 715.430312] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] nova.exception.PortBindingFailed: Binding failed for port 9073c855-90cf-41ad-8ef6-a60909d19c57, please check neutron logs for more information. [ 715.430312] env[61852]: ERROR nova.compute.manager [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] [ 715.432522] env[61852]: DEBUG nova.compute.utils [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Binding failed for port 9073c855-90cf-41ad-8ef6-a60909d19c57, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 715.433214] env[61852]: DEBUG oslo_concurrency.lockutils [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.312s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 715.437433] env[61852]: DEBUG nova.compute.manager [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Build of instance 29cb49fe-627a-4f0f-919b-58f764cd63d0 was re-scheduled: Binding failed for port 9073c855-90cf-41ad-8ef6-a60909d19c57, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 715.438113] env[61852]: DEBUG nova.compute.manager [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 715.438489] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Acquiring lock "refresh_cache-29cb49fe-627a-4f0f-919b-58f764cd63d0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 715.438802] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Acquired lock "refresh_cache-29cb49fe-627a-4f0f-919b-58f764cd63d0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.439114] env[61852]: DEBUG nova.network.neutron [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 715.595283] env[61852]: INFO nova.compute.manager [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 48b40da3-1efc-4557-a791-e88158338aec] Took 1.03 seconds to deallocate network for instance. [ 715.736626] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Releasing lock "refresh_cache-a77ddc8b-f3b2-4e13-944d-5cafecf59fae" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 715.737079] env[61852]: DEBUG nova.compute.manager [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 715.737277] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 715.737592] env[61852]: DEBUG oslo_concurrency.lockutils [req-78cdf8b4-b8fc-4c54-b024-3d622a436269 req-a3847934-36a4-48cd-ad47-0517b929e76f service nova] Acquired lock "refresh_cache-a77ddc8b-f3b2-4e13-944d-5cafecf59fae" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.737759] env[61852]: DEBUG nova.network.neutron [req-78cdf8b4-b8fc-4c54-b024-3d622a436269 req-a3847934-36a4-48cd-ad47-0517b929e76f service nova] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Refreshing network info cache for port 8025adf1-c695-4094-84cc-7d345f318195 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 715.739031] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-84864d98-9ed8-4312-a7b1-ab53ae264a5f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.756497] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-250043c5-f768-49d4-b994-9343fb938a9b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.773309] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 715.773883] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-180b70d6-2381-41c4-9275-34ad1ba0cab8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.779961] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Waiting for the task: (returnval){ [ 715.779961] env[61852]: value = "task-1292713" [ 715.779961] env[61852]: _type = "Task" [ 715.779961] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.787414] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a77ddc8b-f3b2-4e13-944d-5cafecf59fae could not be found. 
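[editor's note] Two patterns recur in the records above and below: per-instance serialization through oslo.concurrency locks named "refresh_cache-<instance uuid>", and blocking on vCenter tasks (PowerOffVM_Task, and later DeleteDatastoreFile_Task) through oslo.vmware, whose wait_for_task polling produces the "progress is 0%" lines. A hedged usage sketch of both, assuming an already-constructed VMwareAPISession; the host and credentials shown are placeholders, not values from this deployment.

    from oslo_concurrency import lockutils
    from oslo_vmware import api  # used only by the commented setup below

    def refresh_instance_cache(instance_uuid, build_nw_info):
        # Same lock-name scheme as the log ("refresh_cache-<instance uuid>"),
        # so cache rebuilds and instance teardown never interleave for the
        # same instance.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            return build_nw_info(instance_uuid)

    def power_off_vm(session, vm_ref):
        # invoke_api issues the SOAP call and returns a task reference;
        # wait_for_task polls it (logging progress, as seen in the log)
        # until the task reaches success, or raises if it errors out.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)

    # Placeholder session setup (illustrative values only):
    # session = api.VMwareAPISession('vcenter.example.test', 'user', 'secret',
    #                                api_retry_count=10, task_poll_interval=0.5)

Note how the lock discipline plays out in the surrounding records: the event-handler request (req-78cdf8b4...) only acquires "refresh_cache-a77ddc8b-..." after the build request releases it, so the network-changed event refresh waits for the teardown path's cache read to finish.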
[ 715.787625] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 715.787804] env[61852]: INFO nova.compute.manager [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Took 0.05 seconds to destroy the instance on the hypervisor. [ 715.788055] env[61852]: DEBUG oslo.service.loopingcall [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 715.788605] env[61852]: DEBUG nova.compute.manager [-] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 715.788708] env[61852]: DEBUG nova.network.neutron [-] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 715.795252] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292713, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.803904] env[61852]: DEBUG nova.network.neutron [-] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 715.843635] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 715.968713] env[61852]: DEBUG nova.network.neutron [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 716.059415] env[61852]: DEBUG oslo_concurrency.lockutils [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquiring lock "8d733f93-7636-447b-a5d5-53c16c30061f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 716.059656] env[61852]: DEBUG oslo_concurrency.lockutils [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Lock "8d733f93-7636-447b-a5d5-53c16c30061f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 716.075566] env[61852]: DEBUG nova.network.neutron [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.269334] env[61852]: DEBUG nova.network.neutron [req-78cdf8b4-b8fc-4c54-b024-3d622a436269 req-a3847934-36a4-48cd-ad47-0517b929e76f service nova] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 716.293793] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292713, 'name': PowerOffVM_Task, 'duration_secs': 0.19331} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.294068] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 716.294276] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 716.295073] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b2dded5-be09-4767-8792-0cd5327a3820 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.303150] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 716.303452] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ffab94f8-25bc-4a6b-9d65-f3850811fbf8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.306262] env[61852]: DEBUG nova.network.neutron [-] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.334213] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 716.334213] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 716.334213] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Deleting the datastore file [datastore1] 694889e8-200e-454c-9e87-60521dd044d9 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 716.336368] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0b4adc6c-84e3-4c9b-8fa7-83ecb3e01807 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.343447] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Waiting for the task: (returnval){ [ 716.343447] env[61852]: value = 
"task-1292715" [ 716.343447] env[61852]: _type = "Task" [ 716.343447] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.354547] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292715, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.361970] env[61852]: DEBUG nova.network.neutron [req-78cdf8b4-b8fc-4c54-b024-3d622a436269 req-a3847934-36a4-48cd-ad47-0517b929e76f service nova] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.381308] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2ef03bb-96b0-4691-93e3-6ae5c240c2b5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.392973] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5db6a045-6504-4eaf-940d-7cdd662ea8cd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.429296] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eee0417-1fbd-4e00-ad3c-17020249a054 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.437278] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6998afd-2040-445c-b157-5c6ed1cf2424 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.450900] env[61852]: DEBUG nova.compute.provider_tree [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 716.578684] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Releasing lock "refresh_cache-29cb49fe-627a-4f0f-919b-58f764cd63d0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 716.578928] env[61852]: DEBUG nova.compute.manager [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 716.580185] env[61852]: DEBUG nova.compute.manager [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 716.580435] env[61852]: DEBUG nova.network.neutron [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 716.598327] env[61852]: DEBUG nova.network.neutron [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 716.634776] env[61852]: INFO nova.scheduler.client.report [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Deleted allocations for instance 48b40da3-1efc-4557-a791-e88158338aec [ 716.810596] env[61852]: INFO nova.compute.manager [-] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Took 1.02 seconds to deallocate network for instance. [ 716.813073] env[61852]: DEBUG nova.compute.claims [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 716.813255] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 716.856221] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292715, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.107858} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.856466] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 716.856644] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 716.856816] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 716.866958] env[61852]: DEBUG oslo_concurrency.lockutils [req-78cdf8b4-b8fc-4c54-b024-3d622a436269 req-a3847934-36a4-48cd-ad47-0517b929e76f service nova] Releasing lock "refresh_cache-a77ddc8b-f3b2-4e13-944d-5cafecf59fae" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 716.867204] env[61852]: DEBUG nova.compute.manager [req-78cdf8b4-b8fc-4c54-b024-3d622a436269 req-a3847934-36a4-48cd-ad47-0517b929e76f service nova] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Received event network-vif-deleted-8025adf1-c695-4094-84cc-7d345f318195 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 716.956716] env[61852]: DEBUG nova.scheduler.client.report [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 717.103211] env[61852]: DEBUG nova.network.neutron [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.150475] env[61852]: DEBUG oslo_concurrency.lockutils [None req-38616bbe-3197-4f33-a294-618ea7aa1eec tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Lock "48b40da3-1efc-4557-a791-e88158338aec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 145.397s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.463499] env[61852]: DEBUG oslo_concurrency.lockutils [None 
req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.030s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.464185] env[61852]: ERROR nova.compute.manager [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6eada10d-828c-47ae-98fc-a2c58e5caf26, please check neutron logs for more information. [ 717.464185] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] Traceback (most recent call last): [ 717.464185] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 717.464185] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] self.driver.spawn(context, instance, image_meta, [ 717.464185] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 717.464185] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 717.464185] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 717.464185] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] vm_ref = self.build_virtual_machine(instance, [ 717.464185] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 717.464185] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] vif_infos = vmwarevif.get_vif_info(self._session, [ 717.464185] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 717.464527] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] for vif in network_info: [ 717.464527] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 717.464527] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] return self._sync_wrapper(fn, *args, **kwargs) [ 717.464527] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 717.464527] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] self.wait() [ 717.464527] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 717.464527] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] self[:] = self._gt.wait() [ 717.464527] env[61852]: ERROR 
nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 717.464527] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] return self._exit_event.wait() [ 717.464527] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 717.464527] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] result = hub.switch() [ 717.464527] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 717.464527] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] return self.greenlet.switch() [ 717.464945] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 717.464945] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] result = function(*args, **kwargs) [ 717.464945] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 717.464945] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] return func(*args, **kwargs) [ 717.464945] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 717.464945] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] raise e [ 717.464945] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 717.464945] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] nwinfo = self.network_api.allocate_for_instance( [ 717.464945] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 717.464945] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] created_port_ids = self._update_ports_for_instance( [ 717.464945] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 717.464945] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] with excutils.save_and_reraise_exception(): [ 717.464945] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 717.465362] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] self.force_reraise() [ 717.465362] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 717.465362] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] raise 
self.value [ 717.465362] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 717.465362] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] updated_port = self._update_port( [ 717.465362] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 717.465362] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] _ensure_no_port_binding_failure(port) [ 717.465362] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 717.465362] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] raise exception.PortBindingFailed(port_id=port['id']) [ 717.465362] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] nova.exception.PortBindingFailed: Binding failed for port 6eada10d-828c-47ae-98fc-a2c58e5caf26, please check neutron logs for more information. [ 717.465362] env[61852]: ERROR nova.compute.manager [instance: beffa800-ff93-4230-be14-f2b906666cc0] [ 717.465712] env[61852]: DEBUG nova.compute.utils [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Binding failed for port 6eada10d-828c-47ae-98fc-a2c58e5caf26, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 717.466706] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.154s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 717.471362] env[61852]: INFO nova.compute.claims [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 717.474967] env[61852]: DEBUG nova.compute.manager [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Build of instance beffa800-ff93-4230-be14-f2b906666cc0 was re-scheduled: Binding failed for port 6eada10d-828c-47ae-98fc-a2c58e5caf26, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 717.477142] env[61852]: DEBUG nova.compute.manager [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 717.477142] env[61852]: DEBUG oslo_concurrency.lockutils [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Acquiring lock "refresh_cache-beffa800-ff93-4230-be14-f2b906666cc0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 717.477142] env[61852]: DEBUG oslo_concurrency.lockutils [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Acquired lock "refresh_cache-beffa800-ff93-4230-be14-f2b906666cc0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.477142] env[61852]: DEBUG nova.network.neutron [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 717.605315] env[61852]: INFO nova.compute.manager [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] [instance: 29cb49fe-627a-4f0f-919b-58f764cd63d0] Took 1.02 seconds to deallocate network for instance. [ 717.656119] env[61852]: DEBUG nova.compute.manager [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 717.896109] env[61852]: DEBUG nova.virt.hardware [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 717.896260] env[61852]: DEBUG nova.virt.hardware [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 717.896421] env[61852]: DEBUG nova.virt.hardware [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 717.896635] env[61852]: DEBUG nova.virt.hardware [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 717.896786] env[61852]: DEBUG nova.virt.hardware [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 717.896932] env[61852]: DEBUG nova.virt.hardware [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 717.897152] env[61852]: DEBUG nova.virt.hardware [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 717.897308] env[61852]: DEBUG nova.virt.hardware [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 717.897470] env[61852]: DEBUG nova.virt.hardware [None req-8194bf69-400a-4855-87ba-1751dd9217ae 
tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 717.897629] env[61852]: DEBUG nova.virt.hardware [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 717.897924] env[61852]: DEBUG nova.virt.hardware [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 717.898982] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6dd967c-256d-43f4-9186-81a88b4dd441 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.908467] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1b1f751-107e-4fd2-a6eb-44892da00cf9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.924280] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Instance VIF info [] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 717.929759] env[61852]: DEBUG oslo.service.loopingcall [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 717.930016] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 717.930256] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-499af415-7572-401a-8ed4-d999a157730a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.948560] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 717.948560] env[61852]: value = "task-1292716" [ 717.948560] env[61852]: _type = "Task" [ 717.948560] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.956585] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292716, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.007218] env[61852]: DEBUG nova.network.neutron [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 718.114485] env[61852]: DEBUG nova.network.neutron [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 718.184242] env[61852]: DEBUG oslo_concurrency.lockutils [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 718.458471] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292716, 'name': CreateVM_Task, 'duration_secs': 0.258835} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.458829] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 718.459783] env[61852]: DEBUG oslo_vmware.service [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e63c8c87-280c-4b8c-9855-b68674f21b06 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.465280] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 718.465487] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.465869] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 718.466146] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c908a864-c692-4c8a-b425-73a44ddc74d4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.470257] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Waiting for the task: (returnval){ [ 718.470257] env[61852]: value = 
"session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52232910-eaa7-193e-1e63-06993283e199" [ 718.470257] env[61852]: _type = "Task" [ 718.470257] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.479159] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52232910-eaa7-193e-1e63-06993283e199, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.621505] env[61852]: DEBUG oslo_concurrency.lockutils [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Releasing lock "refresh_cache-beffa800-ff93-4230-be14-f2b906666cc0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 718.622646] env[61852]: DEBUG nova.compute.manager [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 718.623098] env[61852]: DEBUG nova.compute.manager [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 718.623418] env[61852]: DEBUG nova.network.neutron [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] [instance: beffa800-ff93-4230-be14-f2b906666cc0] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 718.644048] env[61852]: DEBUG nova.network.neutron [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 718.659690] env[61852]: INFO nova.scheduler.client.report [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Deleted allocations for instance 29cb49fe-627a-4f0f-919b-58f764cd63d0 [ 718.889228] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b51ba08-b94d-448e-91aa-00f4c85521dd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.897759] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a807a9d-e18a-454e-9423-064ce5352c3c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.926519] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7658b9d4-6ad1-470c-86d3-e469a2aa4121 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.933962] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7897456d-b512-4929-9531-45c7ca4b73d5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.948089] env[61852]: DEBUG nova.compute.provider_tree [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 718.982905] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 718.983186] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 718.983481] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 718.983630] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.984128] env[61852]: DEBUG nova.virt.vmwareapi.ds_util 
[None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 718.984128] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-764f2408-78e5-46ed-ba0c-69204f256737 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.003975] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 719.003975] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 719.003975] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b48b8a64-55b9-4cc0-b553-1a931fe09561 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.008605] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71ffb24c-7e8f-456a-ae28-400790503b4d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.013342] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Waiting for the task: (returnval){ [ 719.013342] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d54f63-3264-9c06-269f-9df034019e41" [ 719.013342] env[61852]: _type = "Task" [ 719.013342] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.020779] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d54f63-3264-9c06-269f-9df034019e41, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.145512] env[61852]: DEBUG nova.network.neutron [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.171559] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fbad4d05-12fe-4dbc-aedb-a9aeed4f8f54 tempest-ServersTestManualDisk-1514554919 tempest-ServersTestManualDisk-1514554919-project-member] Lock "29cb49fe-627a-4f0f-919b-58f764cd63d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 147.391s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 719.381864] env[61852]: DEBUG oslo_concurrency.lockutils [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Acquiring lock "e265a4be-7b37-40b5-a199-42a7cd945f66" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 719.382124] env[61852]: DEBUG oslo_concurrency.lockutils [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Lock "e265a4be-7b37-40b5-a199-42a7cd945f66" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 719.451182] env[61852]: DEBUG nova.scheduler.client.report [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 719.523638] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Preparing fetch location {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 719.523906] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Creating directory with path [datastore2] vmware_temp/a5dada8d-0838-46ad-beb0-b5074bdf41bc/90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 719.524161] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-bce72122-0d6f-45fc-a2c9-8c2038be4209 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.545497] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Created directory with path [datastore2] vmware_temp/a5dada8d-0838-46ad-beb0-b5074bdf41bc/90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 719.545497] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Fetch image to [datastore2] vmware_temp/a5dada8d-0838-46ad-beb0-b5074bdf41bc/90fd8f39-16b3-43e0-a682-0ec131005e31/tmp-sparse.vmdk {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 719.545497] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Downloading image file data 90fd8f39-16b3-43e0-a682-0ec131005e31 to [datastore2] vmware_temp/a5dada8d-0838-46ad-beb0-b5074bdf41bc/90fd8f39-16b3-43e0-a682-0ec131005e31/tmp-sparse.vmdk on the data store datastore2 {{(pid=61852) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 719.545497] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c400e57-9994-4dd3-8f11-f2d9b4acf9fa {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.552856] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e39d362-084f-4841-98a3-f4a0443e75b0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.560978] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d0c2f1f-db8f-4371-bf77-3501a40a7b5d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.591964] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-709ac3d8-262f-461b-aab2-fb2681fc6837 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.599091] env[61852]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5ffc05d0-61ad-4c8d-a15e-1ddac8ad18fc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.619665] env[61852]: DEBUG nova.virt.vmwareapi.images [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Downloading image file data 90fd8f39-16b3-43e0-a682-0ec131005e31 to the data store datastore2 {{(pid=61852) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 719.648473] env[61852]: INFO nova.compute.manager [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 
tempest-AttachInterfacesUnderV243Test-1934240239-project-member] [instance: beffa800-ff93-4230-be14-f2b906666cc0] Took 1.02 seconds to deallocate network for instance. [ 719.676079] env[61852]: DEBUG nova.compute.manager [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 719.681551] env[61852]: DEBUG oslo_vmware.rw_handles [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a5dada8d-0838-46ad-beb0-b5074bdf41bc/90fd8f39-16b3-43e0-a682-0ec131005e31/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61852) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 719.959583] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.491s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 719.960271] env[61852]: DEBUG nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 719.963688] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.696s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 719.965703] env[61852]: INFO nova.compute.claims [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 720.199434] env[61852]: DEBUG oslo_concurrency.lockutils [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 720.446719] env[61852]: DEBUG oslo_vmware.rw_handles [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Completed reading data from the image iterator. 
{{(pid=61852) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 720.446817] env[61852]: DEBUG oslo_vmware.rw_handles [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a5dada8d-0838-46ad-beb0-b5074bdf41bc/90fd8f39-16b3-43e0-a682-0ec131005e31/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61852) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 720.470053] env[61852]: DEBUG nova.compute.utils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 720.474949] env[61852]: DEBUG nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 720.474949] env[61852]: DEBUG nova.network.neutron [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 720.505672] env[61852]: DEBUG nova.virt.vmwareapi.images [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Downloaded image file data 90fd8f39-16b3-43e0-a682-0ec131005e31 to vmware_temp/a5dada8d-0838-46ad-beb0-b5074bdf41bc/90fd8f39-16b3-43e0-a682-0ec131005e31/tmp-sparse.vmdk on the data store datastore2 {{(pid=61852) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 720.507855] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Caching image {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 720.508155] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Copying Virtual Disk [datastore2] vmware_temp/a5dada8d-0838-46ad-beb0-b5074bdf41bc/90fd8f39-16b3-43e0-a682-0ec131005e31/tmp-sparse.vmdk to [datastore2] vmware_temp/a5dada8d-0838-46ad-beb0-b5074bdf41bc/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 720.508433] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dd562e39-761f-49a3-a746-51dc9ad09b87 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.516544] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] 
Waiting for the task: (returnval){ [ 720.516544] env[61852]: value = "task-1292717" [ 720.516544] env[61852]: _type = "Task" [ 720.516544] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.521362] env[61852]: DEBUG nova.policy [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '52bf0aad002740e28da26a9e1d6b14da', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '856d91d948e84ab69536db1faebf54ee', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 720.528124] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292717, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.688347] env[61852]: INFO nova.scheduler.client.report [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Deleted allocations for instance beffa800-ff93-4230-be14-f2b906666cc0 [ 720.943815] env[61852]: DEBUG nova.network.neutron [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Successfully created port: 0d927d4f-0ee3-47fd-8f50-9c9eac097544 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 720.976629] env[61852]: DEBUG nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 721.030511] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292717, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.197087] env[61852]: DEBUG oslo_concurrency.lockutils [None req-130c310b-fe2c-43e8-bb52-97967150e90a tempest-AttachInterfacesUnderV243Test-1934240239 tempest-AttachInterfacesUnderV243Test-1934240239-project-member] Lock "beffa800-ff93-4230-be14-f2b906666cc0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 143.555s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.348518] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b988e417-bf85-420c-883c-bd452194a4c8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.358237] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1fee77f-53ca-4219-98fb-bd69d9d5554b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.395290] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b26918a-df97-4d10-a61c-70f34f358d5a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.404068] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1872a9c-96c7-49b7-987b-641e51a3c2c6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.424916] env[61852]: DEBUG nova.compute.provider_tree [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 721.527639] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292717, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.647603} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.527903] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Copied Virtual Disk [datastore2] vmware_temp/a5dada8d-0838-46ad-beb0-b5074bdf41bc/90fd8f39-16b3-43e0-a682-0ec131005e31/tmp-sparse.vmdk to [datastore2] vmware_temp/a5dada8d-0838-46ad-beb0-b5074bdf41bc/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 721.528090] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Deleting the datastore file [datastore2] vmware_temp/a5dada8d-0838-46ad-beb0-b5074bdf41bc/90fd8f39-16b3-43e0-a682-0ec131005e31/tmp-sparse.vmdk {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 721.528341] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-33ee3a75-0558-4396-a4a1-e4b72d781d92 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.535550] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Waiting for the task: (returnval){ [ 721.535550] env[61852]: value = "task-1292718" [ 721.535550] env[61852]: _type = "Task" [ 721.535550] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.543838] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292718, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.699976] env[61852]: DEBUG nova.compute.manager [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 721.891628] env[61852]: DEBUG nova.compute.manager [req-03c384d1-f17e-4dcb-8bd0-638f68b6080a req-518c2ea1-0044-42a7-8da6-c7a1f82eab9d service nova] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Received event network-changed-0d927d4f-0ee3-47fd-8f50-9c9eac097544 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 721.891841] env[61852]: DEBUG nova.compute.manager [req-03c384d1-f17e-4dcb-8bd0-638f68b6080a req-518c2ea1-0044-42a7-8da6-c7a1f82eab9d service nova] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Refreshing instance network info cache due to event network-changed-0d927d4f-0ee3-47fd-8f50-9c9eac097544. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 721.892073] env[61852]: DEBUG oslo_concurrency.lockutils [req-03c384d1-f17e-4dcb-8bd0-638f68b6080a req-518c2ea1-0044-42a7-8da6-c7a1f82eab9d service nova] Acquiring lock "refresh_cache-5992f657-c29e-4da5-98f1-286a384ca0cd" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 721.892210] env[61852]: DEBUG oslo_concurrency.lockutils [req-03c384d1-f17e-4dcb-8bd0-638f68b6080a req-518c2ea1-0044-42a7-8da6-c7a1f82eab9d service nova] Acquired lock "refresh_cache-5992f657-c29e-4da5-98f1-286a384ca0cd" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.892885] env[61852]: DEBUG nova.network.neutron [req-03c384d1-f17e-4dcb-8bd0-638f68b6080a req-518c2ea1-0044-42a7-8da6-c7a1f82eab9d service nova] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Refreshing network info cache for port 0d927d4f-0ee3-47fd-8f50-9c9eac097544 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 721.928379] env[61852]: DEBUG nova.scheduler.client.report [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 721.989519] env[61852]: DEBUG nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 722.005293] env[61852]: ERROR nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 0d927d4f-0ee3-47fd-8f50-9c9eac097544, please check neutron logs for more information. 
[ 722.005293] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 722.005293] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 722.005293] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 722.005293] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 722.005293] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 722.005293] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 722.005293] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 722.005293] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 722.005293] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 722.005293] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 722.005293] env[61852]: ERROR nova.compute.manager raise self.value [ 722.005293] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 722.005293] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 722.005293] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 722.005293] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 722.005748] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 722.005748] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 722.005748] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 0d927d4f-0ee3-47fd-8f50-9c9eac097544, please check neutron logs for more information. 
[ 722.005748] env[61852]: ERROR nova.compute.manager [ 722.005748] env[61852]: Traceback (most recent call last): [ 722.005748] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 722.005748] env[61852]: listener.cb(fileno) [ 722.005748] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 722.005748] env[61852]: result = function(*args, **kwargs) [ 722.005748] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 722.005748] env[61852]: return func(*args, **kwargs) [ 722.005748] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 722.005748] env[61852]: raise e [ 722.005748] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 722.005748] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 722.005748] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 722.005748] env[61852]: created_port_ids = self._update_ports_for_instance( [ 722.005748] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 722.005748] env[61852]: with excutils.save_and_reraise_exception(): [ 722.005748] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 722.005748] env[61852]: self.force_reraise() [ 722.005748] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 722.005748] env[61852]: raise self.value [ 722.005748] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 722.005748] env[61852]: updated_port = self._update_port( [ 722.005748] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 722.005748] env[61852]: _ensure_no_port_binding_failure(port) [ 722.005748] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 722.005748] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 722.006681] env[61852]: nova.exception.PortBindingFailed: Binding failed for port 0d927d4f-0ee3-47fd-8f50-9c9eac097544, please check neutron logs for more information. 
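The traceback above appears twice because the PortBindingFailed raised inside _update_ports_for_instance is re-raised by oslo.utils' save_and_reraise_exception context manager (which logs before re-raising), and then surfaces again when the allocation greenthread's result is consumed. A minimal, self-contained sketch of that re-raise pattern follows; the PortBindingFailed class, update_ports helper, and port dict are illustrative stand-ins for this sketch, not Nova's actual objects, though the binding:vif_type check mirrors the one in nova/network/neutron.py:

    from oslo_utils import excutils

    class PortBindingFailed(Exception):
        # Illustrative stand-in for nova.exception.PortBindingFailed.
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def _ensure_no_port_binding_failure(port):
        # Neutron reports a failed binding by setting binding:vif_type
        # to 'binding_failed' on the port; the caller turns that into
        # an exception instead of proceeding with an unusable port.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port['id'])

    def update_ports(ports):
        # Hypothetical helper standing in for the per-port update loop.
        for port in ports:
            with excutils.save_and_reraise_exception():
                # On an exception, the context manager logs it and then
                # re-raises it on exit, which is why the same traceback
                # shows up more than once in the compute log.
                _ensure_no_port_binding_failure(port)

    try:
        update_ports([{'id': '0d927d4f-0ee3-47fd-8f50-9c9eac097544',
                       'binding:vif_type': 'binding_failed'}])
    except PortBindingFailed as exc:
        print(exc)

Run as-is (with oslo.utils installed), this prints the same "Binding failed for port ..." message seen in the records above and below.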
[ 722.006681] env[61852]: Removing descriptor: 19 [ 722.017225] env[61852]: DEBUG nova.virt.hardware [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 722.017521] env[61852]: DEBUG nova.virt.hardware [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 722.017656] env[61852]: DEBUG nova.virt.hardware [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 722.017826] env[61852]: DEBUG nova.virt.hardware [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 722.017960] env[61852]: DEBUG nova.virt.hardware [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 722.018248] env[61852]: DEBUG nova.virt.hardware [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 722.018514] env[61852]: DEBUG nova.virt.hardware [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 722.019183] env[61852]: DEBUG nova.virt.hardware [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 722.019183] env[61852]: DEBUG nova.virt.hardware [None req-1cd6eafa-9c50-4789-805c-2036e7203832 
tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 722.019183] env[61852]: DEBUG nova.virt.hardware [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 722.019183] env[61852]: DEBUG nova.virt.hardware [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 722.020956] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80dfd23f-c3ab-4a27-9030-9df441783a9c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.032348] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da0ae86d-b23e-4934-9985-06d8b6b755de {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.054518] env[61852]: ERROR nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 0d927d4f-0ee3-47fd-8f50-9c9eac097544, please check neutron logs for more information. 
[ 722.054518] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Traceback (most recent call last): [ 722.054518] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 722.054518] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] yield resources [ 722.054518] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 722.054518] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] self.driver.spawn(context, instance, image_meta, [ 722.054518] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 722.054518] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 722.054518] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 722.054518] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] vm_ref = self.build_virtual_machine(instance, [ 722.054518] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 722.054895] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] vif_infos = vmwarevif.get_vif_info(self._session, [ 722.054895] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 722.054895] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] for vif in network_info: [ 722.054895] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 722.054895] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] return self._sync_wrapper(fn, *args, **kwargs) [ 722.054895] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 722.054895] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] self.wait() [ 722.054895] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 722.054895] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] self[:] = self._gt.wait() [ 722.054895] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 722.054895] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] return self._exit_event.wait() [ 722.054895] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 722.054895] env[61852]: ERROR 
nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] current.throw(*self._exc) [ 722.055317] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 722.055317] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] result = function(*args, **kwargs) [ 722.055317] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 722.055317] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] return func(*args, **kwargs) [ 722.055317] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 722.055317] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] raise e [ 722.055317] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 722.055317] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] nwinfo = self.network_api.allocate_for_instance( [ 722.055317] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 722.055317] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] created_port_ids = self._update_ports_for_instance( [ 722.055317] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 722.055317] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] with excutils.save_and_reraise_exception(): [ 722.055317] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 722.055756] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] self.force_reraise() [ 722.055756] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 722.055756] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] raise self.value [ 722.055756] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 722.055756] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] updated_port = self._update_port( [ 722.055756] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 722.055756] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] _ensure_no_port_binding_failure(port) [ 722.055756] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
722.055756] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] raise exception.PortBindingFailed(port_id=port['id']) [ 722.055756] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] nova.exception.PortBindingFailed: Binding failed for port 0d927d4f-0ee3-47fd-8f50-9c9eac097544, please check neutron logs for more information. [ 722.055756] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] [ 722.055756] env[61852]: INFO nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Terminating instance [ 722.060657] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292718, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.026027} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.061632] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquiring lock "refresh_cache-5992f657-c29e-4da5-98f1-286a384ca0cd" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 722.061632] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 722.061632] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Moving file from [datastore2] vmware_temp/a5dada8d-0838-46ad-beb0-b5074bdf41bc/90fd8f39-16b3-43e0-a682-0ec131005e31 to [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31. {{(pid=61852) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 722.062591] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-c6314714-8323-4a56-82fd-a5cb97e7a8a3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.068968] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Waiting for the task: (returnval){ [ 722.068968] env[61852]: value = "task-1292719" [ 722.068968] env[61852]: _type = "Task" [ 722.068968] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.076862] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292719, 'name': MoveDatastoreFile_Task} progress is 0%. 
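[editor's note] The records above show oslo.vmware's wait_for_task polling task-1292719 (MoveDatastoreFile_Task) from "progress is 0%" until _poll_task reports completion. A minimal sketch of that poll-until-done shape, with a hypothetical fetch_task_info callable standing in for the PropertyCollector round-trip oslo.vmware performs on each poll (the names here are illustrative, not oslo.vmware's API):

```python
import time

# Hypothetical stand-in for the per-poll PropertyCollector round-trip;
# returns (state, progress, error) for the given task reference.
def fetch_task_info(task_ref):
    raise NotImplementedError("illustrative stub only")

def wait_for_task(task_ref, interval=0.5, timeout=300):
    """Poll a vSphere task until it succeeds, fails, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress, error = fetch_task_info(task_ref)
        if state == "success":
            return
        if state == "error":
            raise RuntimeError(f"task {task_ref} failed: {error}")
        # 'queued' or 'running': log progress and poll again, mirroring
        # the "progress is 0%" records in the log.
        print(f"task {task_ref} progress is {progress}%")
        time.sleep(interval)
    raise TimeoutError(f"task {task_ref} did not complete in {timeout}s")
```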
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.227397] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 722.414651] env[61852]: DEBUG nova.network.neutron [req-03c384d1-f17e-4dcb-8bd0-638f68b6080a req-518c2ea1-0044-42a7-8da6-c7a1f82eab9d service nova] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 722.433870] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.470s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 722.434525] env[61852]: DEBUG nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 722.437201] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 17.459s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 722.437378] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 722.437637] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61852) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 722.437807] env[61852]: DEBUG oslo_concurrency.lockutils [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.586s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 722.441266] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a18767e-70e1-44f3-b21c-e21429672622 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.450074] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cdb3e56-560a-4219-b3e3-ffa31fc285b7 {{(pid=61852) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.469262] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df7ed9b-f324-4cbc-b407-a3f01ba2e534 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.477174] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41c8622d-a6ab-43de-8281-5f0918edf082 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.510397] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181537MB free_disk=139GB free_vcpus=48 pci_devices=None {{(pid=61852) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 722.510500] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 722.559960] env[61852]: DEBUG nova.network.neutron [req-03c384d1-f17e-4dcb-8bd0-638f68b6080a req-518c2ea1-0044-42a7-8da6-c7a1f82eab9d service nova] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.578670] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292719, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.024311} completed successfully. 
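[editor's note] The lock accounting in these records ("compute_resources" waited 17.459s by clean_compute_node_cache, waited 15.586s by abort_instance_claim, held 2.470s by instance_claim) comes from oslo.concurrency's lockutils: the resource tracker serializes claims, aborts and audits behind a single named lock, so one slow claim delays everything queued behind it. A minimal sketch of the same pattern, assuming in-process (non-external) locks:

```python
from oslo_concurrency import lockutils

# All resource-tracker mutations contend on one named lock; the
# "waited"/"held" durations in the log are measured around it.
@lockutils.synchronized("compute_resources")
def instance_claim(instance):
    ...  # deduct vcpus/ram/disk from the tracked totals

# Context-manager form, as used for finer-grained critical sections:
def update_available_resource():
    with lockutils.lock("compute_resources"):
        ...  # re-read hypervisor stats and reconcile outstanding claims
```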
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.578957] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] File moved {{(pid=61852) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 722.579169] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Cleaning up location [datastore2] vmware_temp/a5dada8d-0838-46ad-beb0-b5074bdf41bc {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 722.579332] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Deleting the datastore file [datastore2] vmware_temp/a5dada8d-0838-46ad-beb0-b5074bdf41bc {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 722.579611] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6cfe3d3e-4644-44bb-89aa-da584f375180 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.585547] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Waiting for the task: (returnval){ [ 722.585547] env[61852]: value = "task-1292720" [ 722.585547] env[61852]: _type = "Task" [ 722.585547] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.593633] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292720, 'name': DeleteDatastoreFile_Task} progress is 0%. 
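[editor's note] Taken together, tasks 1292718-1292720 trace the VMware driver's image-cache fill for image 90fd8f39: the image is staged under a unique vmware_temp directory, moved into devstack-image-cache_base, and the staging directory is deleted. A sketch of that fetch-if-missing flow, with placeholder datastore helpers (the callable names are illustrative, not nova's API):

```python
import uuid

def fetch_image_if_missing(ds, image_id, download, file_exists, move, delete):
    """Ensure <cache>/<image_id> exists on datastore 'ds'.

    'download', 'file_exists', 'move' and 'delete' are placeholders for
    the datastore operations seen in the log above.
    """
    cached = f"[{ds}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
    if file_exists(cached):
        return cached
    # Stage under a unique temp dir so concurrent fetches cannot collide.
    tmp_dir = f"[{ds}] vmware_temp/{uuid.uuid4()}"
    download(image_id, f"{tmp_dir}/{image_id}")  # glance -> datastore
    move(f"{tmp_dir}/{image_id}", cached)        # MoveDatastoreFile_Task
    delete(tmp_dir)                              # DeleteDatastoreFile_Task cleanup
    return cached
```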
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.859774] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Acquiring lock "0ec1210f-7d42-4b71-abdc-9f818ffb91ea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 722.860048] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Lock "0ec1210f-7d42-4b71-abdc-9f818ffb91ea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 722.942866] env[61852]: DEBUG nova.compute.utils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 722.944305] env[61852]: DEBUG nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 722.946426] env[61852]: DEBUG nova.network.neutron [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 722.993399] env[61852]: DEBUG nova.policy [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '52bf0aad002740e28da26a9e1d6b14da', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '856d91d948e84ab69536db1faebf54ee', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 723.066270] env[61852]: DEBUG oslo_concurrency.lockutils [req-03c384d1-f17e-4dcb-8bd0-638f68b6080a req-518c2ea1-0044-42a7-8da6-c7a1f82eab9d service nova] Releasing lock "refresh_cache-5992f657-c29e-4da5-98f1-286a384ca0cd" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 723.066670] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquired lock "refresh_cache-5992f657-c29e-4da5-98f1-286a384ca0cd" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.066853] env[61852]: 
DEBUG nova.network.neutron [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 723.100220] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292720, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.023044} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.100772] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 723.101447] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54ebb125-87ba-4aee-a688-313c7fe59390 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.106595] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Waiting for the task: (returnval){ [ 723.106595] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52ad855f-775b-4901-15b8-4609d076a879" [ 723.106595] env[61852]: _type = "Task" [ 723.106595] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.115018] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52ad855f-775b-4901-15b8-4609d076a879, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.268736] env[61852]: DEBUG nova.network.neutron [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Successfully created port: 8a46cf7a-24ec-48dc-86fa-470a8270cb0d {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 723.308624] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-312179e4-79c5-4992-96cc-b8a08ccb3d72 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.316536] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73e4bd4a-fa3d-49e1-a5d3-0ab22ebdb404 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.350228] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5a7be9a-aec5-45d7-9337-ff6e50fe699c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.357711] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf72d564-fc04-417f-bbcd-b53d969a8a6a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.378553] env[61852]: DEBUG nova.compute.provider_tree [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 723.450113] env[61852]: DEBUG nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 723.590020] env[61852]: DEBUG nova.network.neutron [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 723.618584] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52ad855f-775b-4901-15b8-4609d076a879, 'name': SearchDatastore_Task, 'duration_secs': 0.008954} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.619027] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 723.622089] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] 694889e8-200e-454c-9e87-60521dd044d9/694889e8-200e-454c-9e87-60521dd044d9.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 723.622089] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-da7f5323-6de9-44c6-9244-7e7eadb534fc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.630034] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Waiting for the task: (returnval){ [ 723.630034] env[61852]: value = "task-1292721" [ 723.630034] env[61852]: _type = "Task" [ 723.630034] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.635274] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292721, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.738599] env[61852]: DEBUG nova.network.neutron [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.884373] env[61852]: DEBUG nova.scheduler.client.report [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 724.053894] env[61852]: DEBUG nova.compute.manager [req-6fa6f4bf-6e62-4cdb-be94-10aab7ec3570 req-61f38593-8660-4f3a-9e67-93048d58e6e4 service nova] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Received event network-vif-deleted-0d927d4f-0ee3-47fd-8f50-9c9eac097544 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 724.137989] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292721, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.240340] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Releasing lock "refresh_cache-5992f657-c29e-4da5-98f1-286a384ca0cd" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 724.240768] env[61852]: DEBUG nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Start destroying the instance on the hypervisor. 
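[editor's note] The inventory dict reported above feeds placement, where the schedulable capacity of each resource class is derived from total, reserved and allocation_ratio (capacity = (total - reserved) * allocation_ratio), while max_unit caps any single allocation. A quick check of the logged numbers:

```python
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 139},
}

for rc, inv in inventory.items():
    # Overall schedulable capacity; max_unit still caps one allocation.
    capacity = int((inv["total"] - inv["reserved"]) * inv["allocation_ratio"])
    print(f"{rc}: capacity={capacity}, per-allocation cap={inv['max_unit']}")
    # -> VCPU: 192 (48 * 4.0), MEMORY_MB: 196078, DISK_GB: 400
```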
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 724.240960] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 724.241284] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0d24cf4b-5387-4fdc-85a3-a66063a5cd22 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.256016] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43618104-839d-4140-81ec-30c291a2a849 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.288405] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5992f657-c29e-4da5-98f1-286a384ca0cd could not be found. [ 724.288405] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 724.288405] env[61852]: INFO nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Took 0.05 seconds to destroy the instance on the hypervisor. [ 724.288405] env[61852]: DEBUG oslo.service.loopingcall [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 724.288405] env[61852]: DEBUG nova.compute.manager [-] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 724.288639] env[61852]: DEBUG nova.network.neutron [-] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 724.316207] env[61852]: DEBUG nova.network.neutron [-] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Instance cache missing network info. 
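[editor's note] The destroy path above treats a backend miss as success: FindAllByUuid returns nothing, vmops logs "Instance does not exist on backend" yet still reports "Instance destroyed", and network deallocation proceeds under a retry loop. A sketch of that idempotent-teardown shape (the exception class and helpers here are illustrative):

```python
class InstanceNotFound(Exception):
    pass

def destroy_instance(instance_uuid, find_vm, destroy_vm, deallocate_network):
    """Best-effort teardown: a VM missing on the backend is not an error."""
    try:
        vm_ref = find_vm(instance_uuid)   # e.g. SearchIndex.FindAllByUuid
        destroy_vm(vm_ref)
    except InstanceNotFound:
        print(f"Instance {instance_uuid} does not exist on backend; "
              "continuing with teardown")
    # Network cleanup runs regardless, typically wrapped in retries.
    deallocate_network(instance_uuid)
```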
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 724.390592] env[61852]: DEBUG oslo_concurrency.lockutils [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.953s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 724.391884] env[61852]: ERROR nova.compute.manager [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4e8a73cc-718d-48f5-b710-deb25af08562, please check neutron logs for more information. [ 724.391884] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Traceback (most recent call last): [ 724.391884] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 724.391884] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] self.driver.spawn(context, instance, image_meta, [ 724.391884] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 724.391884] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 724.391884] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 724.391884] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] vm_ref = self.build_virtual_machine(instance, [ 724.391884] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 724.391884] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] vif_infos = vmwarevif.get_vif_info(self._session, [ 724.391884] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 724.392301] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] for vif in network_info: [ 724.392301] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 724.392301] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] return self._sync_wrapper(fn, *args, **kwargs) [ 724.392301] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 724.392301] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] self.wait() [ 724.392301] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 724.392301] env[61852]: ERROR 
nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] self[:] = self._gt.wait() [ 724.392301] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 724.392301] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] return self._exit_event.wait() [ 724.392301] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 724.392301] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] current.throw(*self._exc) [ 724.392301] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 724.392301] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] result = function(*args, **kwargs) [ 724.392929] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 724.392929] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] return func(*args, **kwargs) [ 724.392929] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 724.392929] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] raise e [ 724.392929] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 724.392929] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] nwinfo = self.network_api.allocate_for_instance( [ 724.392929] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 724.392929] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] created_port_ids = self._update_ports_for_instance( [ 724.392929] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 724.392929] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] with excutils.save_and_reraise_exception(): [ 724.392929] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 724.392929] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] self.force_reraise() [ 724.392929] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 724.393326] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] raise self.value [ 724.393326] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in 
_update_ports_for_instance [ 724.393326] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] updated_port = self._update_port( [ 724.393326] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 724.393326] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] _ensure_no_port_binding_failure(port) [ 724.393326] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 724.393326] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] raise exception.PortBindingFailed(port_id=port['id']) [ 724.393326] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] nova.exception.PortBindingFailed: Binding failed for port 4e8a73cc-718d-48f5-b710-deb25af08562, please check neutron logs for more information. [ 724.393326] env[61852]: ERROR nova.compute.manager [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] [ 724.393597] env[61852]: DEBUG nova.compute.utils [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Binding failed for port 4e8a73cc-718d-48f5-b710-deb25af08562, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 724.394888] env[61852]: DEBUG oslo_concurrency.lockutils [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.630s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 724.396786] env[61852]: INFO nova.compute.claims [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 724.400565] env[61852]: DEBUG nova.compute.manager [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Build of instance 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9 was re-scheduled: Binding failed for port 4e8a73cc-718d-48f5-b710-deb25af08562, please check neutron logs for more information. 
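[editor's note] Every PortBindingFailed in this run funnels through the same guard: after updating the port, nova inspects the binding neutron returned and raises if the vif type marks a failed binding, which is what triggers the re-schedule decided above. A minimal sketch of that check, assuming the conventional 'binding:vif_type' == 'binding_failed' marker:

```python
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")

def ensure_no_port_binding_failure(port):
    """Raise if neutron marked the port's binding as failed."""
    if port.get("binding:vif_type") == "binding_failed":
        raise PortBindingFailed(port["id"])

# Example: a port neutron could not bind to any host/agent.
try:
    ensure_no_port_binding_failure(
        {"id": "4e8a73cc-718d-48f5-b710-deb25af08562",
         "binding:vif_type": "binding_failed"})
except PortBindingFailed as e:
    print(e)
```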
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 724.401087] env[61852]: DEBUG nova.compute.manager [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 724.401484] env[61852]: DEBUG oslo_concurrency.lockutils [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "refresh_cache-290aca37-d0d7-4c8c-b8cf-8b787bbf95c9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 724.401674] env[61852]: DEBUG oslo_concurrency.lockutils [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquired lock "refresh_cache-290aca37-d0d7-4c8c-b8cf-8b787bbf95c9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.401899] env[61852]: DEBUG nova.network.neutron [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 724.458370] env[61852]: DEBUG nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 724.501826] env[61852]: DEBUG nova.virt.hardware [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 724.501911] env[61852]: DEBUG nova.virt.hardware [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 724.502063] env[61852]: DEBUG nova.virt.hardware [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 724.502242] env[61852]: DEBUG nova.virt.hardware [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 724.502501] env[61852]: DEBUG nova.virt.hardware [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 724.502585] env[61852]: DEBUG nova.virt.hardware [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 724.502713] env[61852]: DEBUG nova.virt.hardware [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 724.502876] env[61852]: DEBUG nova.virt.hardware [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 724.503379] env[61852]: DEBUG nova.virt.hardware [None 
req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 724.503605] env[61852]: DEBUG nova.virt.hardware [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 724.503782] env[61852]: DEBUG nova.virt.hardware [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 724.504650] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2da31922-b34f-4a39-b05e-7cabd099f373 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.517127] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35972a9c-3dad-421d-bc92-61c88dbc918e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.641582] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292721, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.524891} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.641875] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] 694889e8-200e-454c-9e87-60521dd044d9/694889e8-200e-454c-9e87-60521dd044d9.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 724.642166] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 724.642349] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5b923409-b526-458e-bddb-27f39f3570f7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.648618] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Waiting for the task: (returnval){ [ 724.648618] env[61852]: value = "task-1292722" [ 724.648618] env[61852]: _type = "Task" [ 724.648618] env[61852]: } to complete. 
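[editor's note] The nova.virt.hardware records above walk topology selection for the m1.nano flavor: with no flavor or image constraints the limits default to 65536 sockets/cores/threads, and for a single vCPU the only factorization is 1:1:1. A small sketch of that enumeration (illustrative, not nova's exact algorithm):

```python
from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Yield (sockets, cores, threads) triples whose product equals vcpus."""
    for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                           range(1, min(vcpus, max_cores) + 1),
                           range(1, min(vcpus, max_threads) + 1)):
        if s * c * t == vcpus:
            yield (s, c, t)

print(list(possible_topologies(1)))  # [(1, 1, 1)], as logged above
```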
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.657689] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292722, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.803564] env[61852]: ERROR nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8a46cf7a-24ec-48dc-86fa-470a8270cb0d, please check neutron logs for more information. [ 724.803564] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 724.803564] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 724.803564] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 724.803564] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 724.803564] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 724.803564] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 724.803564] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 724.803564] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 724.803564] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 724.803564] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 724.803564] env[61852]: ERROR nova.compute.manager raise self.value [ 724.803564] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 724.803564] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 724.803564] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 724.803564] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 724.804152] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 724.804152] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 724.804152] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8a46cf7a-24ec-48dc-86fa-470a8270cb0d, please check neutron logs for more information. 
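[editor's note] The traceback shape here is nova's asynchronous network allocation: allocate_for_instance runs in a spawned greenthread, the PortBindingFailed it raises is stored with the greenthread, and the spawning build only sees it later, when the vif loop first iterates network_info and _sync_wrapper calls wait(). A condensed sketch of that deferred-failure pattern with eventlet:

```python
import eventlet

def allocate_for_instance():
    # Stands in for the PortBindingFailed raised by the neutron calls.
    raise RuntimeError("Binding failed for port ...")

gt = eventlet.spawn(allocate_for_instance)  # failure happens asynchronously

# spawn() returns immediately; the build continues until the result is
# actually needed, at which point wait() re-raises the stored exception.
try:
    network_info = gt.wait()
except RuntimeError as e:
    print(f"surfaced at the waiter, not at spawn time: {e}")
```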
[ 724.804152] env[61852]: ERROR nova.compute.manager [ 724.804152] env[61852]: Traceback (most recent call last): [ 724.804152] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 724.804152] env[61852]: listener.cb(fileno) [ 724.804152] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 724.804152] env[61852]: result = function(*args, **kwargs) [ 724.804152] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 724.804152] env[61852]: return func(*args, **kwargs) [ 724.804152] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 724.804152] env[61852]: raise e [ 724.804152] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 724.804152] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 724.804152] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 724.804152] env[61852]: created_port_ids = self._update_ports_for_instance( [ 724.804152] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 724.804152] env[61852]: with excutils.save_and_reraise_exception(): [ 724.804152] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 724.804152] env[61852]: self.force_reraise() [ 724.804152] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 724.804152] env[61852]: raise self.value [ 724.804152] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 724.804152] env[61852]: updated_port = self._update_port( [ 724.804152] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 724.804152] env[61852]: _ensure_no_port_binding_failure(port) [ 724.804152] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 724.804152] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 724.804964] env[61852]: nova.exception.PortBindingFailed: Binding failed for port 8a46cf7a-24ec-48dc-86fa-470a8270cb0d, please check neutron logs for more information. [ 724.804964] env[61852]: Removing descriptor: 19 [ 724.804964] env[61852]: ERROR nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8a46cf7a-24ec-48dc-86fa-470a8270cb0d, please check neutron logs for more information. 
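[editor's note] Both copies of the traceback also show oslo.utils' save_and_reraise_exception at work: _update_ports_for_instance performs cleanup inside the context manager, whose __exit__ calls force_reraise() to re-raise the original exception — which is why those excutils frames appear in every dump. A minimal usage sketch:

```python
from oslo_utils import excutils

def update_ports():
    try:
        raise ValueError("port update failed")  # stands in for the neutron error
    except Exception:
        with excutils.save_and_reraise_exception():
            # Cleanup runs here; the original exception is re-raised when
            # the block exits, preserving the original traceback.
            print("rolling back created ports")

try:
    update_ports()
except ValueError as e:
    print(f"original exception preserved: {e}")
```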
[ 724.804964] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Traceback (most recent call last): [ 724.804964] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 724.804964] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] yield resources [ 724.804964] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 724.804964] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] self.driver.spawn(context, instance, image_meta, [ 724.804964] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 724.804964] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] self._vmops.spawn(context, instance, image_meta, injected_files, [ 724.804964] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 724.804964] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] vm_ref = self.build_virtual_machine(instance, [ 724.805387] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 724.805387] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] vif_infos = vmwarevif.get_vif_info(self._session, [ 724.805387] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 724.805387] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] for vif in network_info: [ 724.805387] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 724.805387] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] return self._sync_wrapper(fn, *args, **kwargs) [ 724.805387] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 724.805387] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] self.wait() [ 724.805387] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 724.805387] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] self[:] = self._gt.wait() [ 724.805387] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 724.805387] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] return self._exit_event.wait() [ 724.805387] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 724.805799] env[61852]: ERROR 
nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] result = hub.switch() [ 724.805799] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 724.805799] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] return self.greenlet.switch() [ 724.805799] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 724.805799] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] result = function(*args, **kwargs) [ 724.805799] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 724.805799] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] return func(*args, **kwargs) [ 724.805799] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 724.805799] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] raise e [ 724.805799] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 724.805799] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] nwinfo = self.network_api.allocate_for_instance( [ 724.805799] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 724.805799] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] created_port_ids = self._update_ports_for_instance( [ 724.806273] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 724.806273] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] with excutils.save_and_reraise_exception(): [ 724.806273] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 724.806273] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] self.force_reraise() [ 724.806273] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 724.806273] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] raise self.value [ 724.806273] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 724.806273] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] updated_port = self._update_port( [ 724.806273] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 724.806273] 
env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] _ensure_no_port_binding_failure(port) [ 724.806273] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 724.806273] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] raise exception.PortBindingFailed(port_id=port['id']) [ 724.806648] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] nova.exception.PortBindingFailed: Binding failed for port 8a46cf7a-24ec-48dc-86fa-470a8270cb0d, please check neutron logs for more information. [ 724.806648] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] [ 724.806648] env[61852]: INFO nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Terminating instance [ 724.807407] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquiring lock "refresh_cache-d7ca3eac-9738-483a-ae14-67e17929a251" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 724.807600] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquired lock "refresh_cache-d7ca3eac-9738-483a-ae14-67e17929a251" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.807772] env[61852]: DEBUG nova.network.neutron [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 724.817984] env[61852]: DEBUG nova.network.neutron [-] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.926414] env[61852]: DEBUG nova.network.neutron [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 725.008085] env[61852]: DEBUG nova.network.neutron [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.164250] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292722, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062681} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.164250] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 725.164250] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2d4e3a7-3f98-45c9-9bdb-9a6f7ce29db1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.190498] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Reconfiguring VM instance instance-00000028 to attach disk [datastore2] 694889e8-200e-454c-9e87-60521dd044d9/694889e8-200e-454c-9e87-60521dd044d9.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 725.190829] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8560792a-4a4f-462c-acfd-35d0333020b0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.210521] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Waiting for the task: (returnval){ [ 725.210521] env[61852]: value = "task-1292723" [ 725.210521] env[61852]: _type = "Task" [ 725.210521] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.220240] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292723, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.320072] env[61852]: INFO nova.compute.manager [-] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Took 1.03 seconds to deallocate network for instance. [ 725.322480] env[61852]: DEBUG nova.compute.claims [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 725.322614] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 725.328694] env[61852]: DEBUG nova.network.neutron [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 725.511104] env[61852]: DEBUG nova.network.neutron [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.514632] env[61852]: DEBUG oslo_concurrency.lockutils [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Releasing lock "refresh_cache-290aca37-d0d7-4c8c-b8cf-8b787bbf95c9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 725.514844] env[61852]: DEBUG nova.compute.manager [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 725.515073] env[61852]: DEBUG nova.compute.manager [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 725.516475] env[61852]: DEBUG nova.network.neutron [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 725.535398] env[61852]: DEBUG nova.network.neutron [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 725.725621] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292723, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.807029] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80d18317-5d79-42c7-b502-3e4fd2701fc2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.815181] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f97caad-bfd7-4f65-82c6-bbb709d380c8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.844712] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f1df4d-c5c3-4f52-b85f-798ab16207e3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.851762] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f9145ee-17c5-4657-8e30-8c72cefb5729 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.864675] env[61852]: DEBUG nova.compute.provider_tree [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 726.018301] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Releasing lock "refresh_cache-d7ca3eac-9738-483a-ae14-67e17929a251" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 726.019009] env[61852]: DEBUG nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 726.019009] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 726.019009] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7f88321e-7a2a-48c8-a716-6e75f466f44c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.027931] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c463591a-aab3-4d46-952e-52c0a96e3d35 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.039424] env[61852]: DEBUG nova.network.neutron [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.052114] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d7ca3eac-9738-483a-ae14-67e17929a251 could not be found. [ 726.052342] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 726.052518] env[61852]: INFO nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Took 0.03 seconds to destroy the instance on the hypervisor. [ 726.052862] env[61852]: DEBUG oslo.service.loopingcall [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 726.053122] env[61852]: DEBUG nova.compute.manager [-] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 726.053218] env[61852]: DEBUG nova.network.neutron [-] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 726.079782] env[61852]: DEBUG nova.network.neutron [-] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 726.208061] env[61852]: DEBUG nova.compute.manager [req-cafb7527-c07a-49f1-80d0-3716dd8db316 req-6c52a137-0b3e-4c2a-a9d0-97b3ffcf186f service nova] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Received event network-changed-8a46cf7a-24ec-48dc-86fa-470a8270cb0d {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 726.208252] env[61852]: DEBUG nova.compute.manager [req-cafb7527-c07a-49f1-80d0-3716dd8db316 req-6c52a137-0b3e-4c2a-a9d0-97b3ffcf186f service nova] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Refreshing instance network info cache due to event network-changed-8a46cf7a-24ec-48dc-86fa-470a8270cb0d. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 726.208455] env[61852]: DEBUG oslo_concurrency.lockutils [req-cafb7527-c07a-49f1-80d0-3716dd8db316 req-6c52a137-0b3e-4c2a-a9d0-97b3ffcf186f service nova] Acquiring lock "refresh_cache-d7ca3eac-9738-483a-ae14-67e17929a251" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 726.208586] env[61852]: DEBUG oslo_concurrency.lockutils [req-cafb7527-c07a-49f1-80d0-3716dd8db316 req-6c52a137-0b3e-4c2a-a9d0-97b3ffcf186f service nova] Acquired lock "refresh_cache-d7ca3eac-9738-483a-ae14-67e17929a251" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.208739] env[61852]: DEBUG nova.network.neutron [req-cafb7527-c07a-49f1-80d0-3716dd8db316 req-6c52a137-0b3e-4c2a-a9d0-97b3ffcf186f service nova] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Refreshing network info cache for port 8a46cf7a-24ec-48dc-86fa-470a8270cb0d {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 726.225686] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292723, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.367325] env[61852]: DEBUG nova.scheduler.client.report [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 726.544030] env[61852]: INFO nova.compute.manager [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9] Took 1.03 seconds to deallocate network for instance. 
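The Acquiring/Acquired/Releasing lines above around lock "refresh_cache-d7ca3eac-9738-483a-ae14-67e17929a251" come from oslo.concurrency's named locks, which serialize the terminating request and the network-changed event handler on the same instance cache. A minimal sketch of the pattern, assuming only the lockutils.lock() context manager the log itself points at (lockutils.py:310); the body is a placeholder, not Nova's real refresh logic:

    from oslo_concurrency import lockutils

    def refresh_network_info_cache(instance_uuid):
        # The lock name mirrors the log; entering lockutils.lock() emits
        # the "Acquiring lock ..." / "Acquired lock ..." DEBUG lines and
        # leaving it emits "Releasing lock ...".
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            # Placeholder: the real code rebuilds instance_info_cache
            # here while concurrent handlers block on the same name.
            pass

Because both code paths in the log take the identical lock name, the event handler's cache refresh cannot interleave with the teardown path's read of the same cache.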
[ 726.585304] env[61852]: DEBUG nova.network.neutron [-] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.724919] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292723, 'name': ReconfigVM_Task, 'duration_secs': 1.271319} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.725138] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Reconfigured VM instance instance-00000028 to attach disk [datastore2] 694889e8-200e-454c-9e87-60521dd044d9/694889e8-200e-454c-9e87-60521dd044d9.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 726.726813] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1a3ce0a0-59f3-4e44-a644-561ea32ba3df {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.733047] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Waiting for the task: (returnval){ [ 726.733047] env[61852]: value = "task-1292724" [ 726.733047] env[61852]: _type = "Task" [ 726.733047] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.736915] env[61852]: DEBUG nova.network.neutron [req-cafb7527-c07a-49f1-80d0-3716dd8db316 req-6c52a137-0b3e-4c2a-a9d0-97b3ffcf186f service nova] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 726.743353] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292724, 'name': Rename_Task} progress is 5%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.824051] env[61852]: DEBUG nova.network.neutron [req-cafb7527-c07a-49f1-80d0-3716dd8db316 req-6c52a137-0b3e-4c2a-a9d0-97b3ffcf186f service nova] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.879357] env[61852]: DEBUG oslo_concurrency.lockutils [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.484s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 726.879937] env[61852]: DEBUG nova.compute.manager [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 726.882863] env[61852]: DEBUG oslo_concurrency.lockutils [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.158s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.088444] env[61852]: INFO nova.compute.manager [-] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Took 1.03 seconds to deallocate network for instance. 
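The task-1292723/task-1292724 records above follow oslo.vmware's invoke-then-poll pattern: a vSphere *_Task method returns a task managed-object reference, and the session polls it (the "progress is N%." lines from _poll_task) until it completes or errors. A hedged sketch of that pattern, assuming an already-created VMwareAPISession like the one opened at the start of this log; rename_vm and new_name are illustrative names, not Nova's:

    from oslo_vmware import api  # session assumed to be api.VMwareAPISession

    def rename_vm(session, vm_ref, new_name):
        # Kicks off the vSphere Rename_Task seen in the log and returns
        # the task moref immediately.
        task = session.invoke_api(session.vim, 'Rename_Task',
                                  vm_ref, newName=new_name)
        # wait_for_task() drives the "progress is N%." polling and
        # raises if the task finishes in an error state.
        return session.wait_for_task(task)

The same shape underlies the earlier ReconfigVM_Task and the PowerOnVM_Task that follows.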
[ 727.091422] env[61852]: DEBUG nova.compute.claims [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 727.091599] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.242613] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquiring lock "89970cff-cb49-4803-81a5-1675b0ea4aaf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.242919] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lock "89970cff-cb49-4803-81a5-1675b0ea4aaf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.250887] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292724, 'name': Rename_Task, 'duration_secs': 0.128} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.250887] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 727.251537] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-31274d02-2959-4b09-9a53-6696cdc1d10e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.262780] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Waiting for the task: (returnval){ [ 727.262780] env[61852]: value = "task-1292725" [ 727.262780] env[61852]: _type = "Task" [ 727.262780] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.272415] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292725, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.281379] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquiring lock "f8ebb1b7-39c6-486e-ab25-23080d858846" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.281692] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lock "f8ebb1b7-39c6-486e-ab25-23080d858846" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.313925] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquiring lock "883a0d5a-f775-4ffc-abf0-921d0ea6cc8c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.314573] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lock "883a0d5a-f775-4ffc-abf0-921d0ea6cc8c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.326821] env[61852]: DEBUG oslo_concurrency.lockutils [req-cafb7527-c07a-49f1-80d0-3716dd8db316 req-6c52a137-0b3e-4c2a-a9d0-97b3ffcf186f service nova] Releasing lock "refresh_cache-d7ca3eac-9738-483a-ae14-67e17929a251" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.327160] env[61852]: DEBUG nova.compute.manager [req-cafb7527-c07a-49f1-80d0-3716dd8db316 req-6c52a137-0b3e-4c2a-a9d0-97b3ffcf186f service nova] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Received event network-vif-deleted-8a46cf7a-24ec-48dc-86fa-470a8270cb0d {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 727.388321] env[61852]: DEBUG nova.compute.utils [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 727.393761] env[61852]: DEBUG nova.compute.manager [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 727.393761] env[61852]: DEBUG nova.network.neutron [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 727.438894] env[61852]: DEBUG nova.policy [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cefed08efd6745449262bd9f880117d2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5d2f2d599ffd40a9b55f63c10e9c0e2f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 727.583031] env[61852]: INFO nova.scheduler.client.report [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Deleted allocations for instance 290aca37-d0d7-4c8c-b8cf-8b787bbf95c9 [ 727.774692] env[61852]: DEBUG oslo_vmware.api [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292725, 'name': PowerOnVM_Task, 'duration_secs': 0.4172} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.777636] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 727.777826] env[61852]: DEBUG nova.compute.manager [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 727.778985] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c757b5-b50e-4e71-9d78-6eaef6da5fb6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.822081] env[61852]: DEBUG nova.network.neutron [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Successfully created port: 8b029d4d-780a-46e9-8f80-1cccecf4f293 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 727.861415] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89154de6-8825-4b04-bc8e-7cbb9066f57a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.870662] env[61852]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb2ebdbb-abac-46fe-b45f-3d29054683cc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.904065] env[61852]: DEBUG nova.compute.manager [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 727.908506] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b9ad766-4bc2-45f5-a8b4-a2f9773a07be {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.916626] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-752a0a0e-d0d4-44cf-9d91-299e3d3687e9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.930964] env[61852]: DEBUG nova.compute.provider_tree [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 728.095024] env[61852]: DEBUG oslo_concurrency.lockutils [None req-86dc4814-b0a7-4bbe-b85a-63c2dcd2b0ba tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "290aca37-d0d7-4c8c-b8cf-8b787bbf95c9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 148.546s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 728.296045] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 728.433931] env[61852]: DEBUG nova.scheduler.client.report [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 728.602183] env[61852]: DEBUG nova.compute.manager [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 728.648641] env[61852]: DEBUG nova.compute.manager [req-43a50816-7e35-4170-b322-6097680d9b17 req-07b28a0e-3c4e-420d-ab23-0859e77fbe71 service nova] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Received event network-changed-8b029d4d-780a-46e9-8f80-1cccecf4f293 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 728.649012] env[61852]: DEBUG nova.compute.manager [req-43a50816-7e35-4170-b322-6097680d9b17 req-07b28a0e-3c4e-420d-ab23-0859e77fbe71 service nova] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Refreshing instance network info cache due to event network-changed-8b029d4d-780a-46e9-8f80-1cccecf4f293. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 728.649398] env[61852]: DEBUG oslo_concurrency.lockutils [req-43a50816-7e35-4170-b322-6097680d9b17 req-07b28a0e-3c4e-420d-ab23-0859e77fbe71 service nova] Acquiring lock "refresh_cache-bd549d69-403b-4c5c-9e08-0c84d32a7c0a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 728.649694] env[61852]: DEBUG oslo_concurrency.lockutils [req-43a50816-7e35-4170-b322-6097680d9b17 req-07b28a0e-3c4e-420d-ab23-0859e77fbe71 service nova] Acquired lock "refresh_cache-bd549d69-403b-4c5c-9e08-0c84d32a7c0a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.650207] env[61852]: DEBUG nova.network.neutron [req-43a50816-7e35-4170-b322-6097680d9b17 req-07b28a0e-3c4e-420d-ab23-0859e77fbe71 service nova] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Refreshing network info cache for port 8b029d4d-780a-46e9-8f80-1cccecf4f293 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 728.836268] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "b0f8f7dd-e559-43be-b541-c3da48a07d68" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 728.836587] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "b0f8f7dd-e559-43be-b541-c3da48a07d68" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 728.846262] env[61852]: ERROR nova.compute.manager [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8b029d4d-780a-46e9-8f80-1cccecf4f293, please check neutron logs for more information. 
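This PortBindingFailed for port 8b029d4d-780a-46e9-8f80-1cccecf4f293 (full traceback follows below) is raised by a small guard in nova/network/neutron.py once Neutron hands back the port with a failed binding. A minimal sketch of that check, reconstructed from the traceback frames; the 'binding_failed' literal stands in for Nova's VIF_TYPE_BINDING_FAILED constant:

    from nova import exception

    def _ensure_no_port_binding_failure(port):
        # Neutron reports a failed binding via the port's
        # binding:vif_type attribute; Nova converts that into the
        # PortBindingFailed seen throughout this log.
        if port.get('binding:vif_type') == 'binding_failed':
            raise exception.PortBindingFailed(port_id=port['id'])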
[ 728.846262] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 728.846262] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 728.846262] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 728.846262] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 728.846262] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 728.846262] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 728.846262] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 728.846262] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 728.846262] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 728.846262] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 728.846262] env[61852]: ERROR nova.compute.manager raise self.value [ 728.846262] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 728.846262] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 728.846262] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 728.846262] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 728.846811] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 728.846811] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 728.846811] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8b029d4d-780a-46e9-8f80-1cccecf4f293, please check neutron logs for more information. 
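The __exit__ / force_reraise / "raise self.value" frames that recur in these tracebacks are oslo.utils' save_and_reraise_exception context manager, which lets cleanup run inside an except block and then re-raises the original error. A self-contained sketch of the pattern, with a stand-in failure rather than a real Neutron call:

    from oslo_utils import excutils

    def bind_port(port_id):
        # Stand-in for the port update that fails in the log.
        raise RuntimeError('binding failed for %s' % port_id)

    def bind_with_cleanup(port_id):
        try:
            bind_port(port_id)
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup runs here; when the block exits, the saved
                # exception is re-raised, producing the __exit__ /
                # force_reraise frames seen in the tracebacks above.
                print('cleaning up failed bind of %s' % port_id)

Calling bind_with_cleanup('8b029d4d-...') would print the cleanup line and then re-raise the RuntimeError, which is exactly how the original PortBindingFailed survives the cleanup path and surfaces in _allocate_network_async.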
[ 728.846811] env[61852]: ERROR nova.compute.manager [ 728.846811] env[61852]: Traceback (most recent call last): [ 728.846811] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 728.846811] env[61852]: listener.cb(fileno) [ 728.846811] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 728.846811] env[61852]: result = function(*args, **kwargs) [ 728.846811] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 728.846811] env[61852]: return func(*args, **kwargs) [ 728.846811] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 728.846811] env[61852]: raise e [ 728.846811] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 728.846811] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 728.846811] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 728.846811] env[61852]: created_port_ids = self._update_ports_for_instance( [ 728.846811] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 728.846811] env[61852]: with excutils.save_and_reraise_exception(): [ 728.846811] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 728.846811] env[61852]: self.force_reraise() [ 728.846811] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 728.846811] env[61852]: raise self.value [ 728.846811] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 728.846811] env[61852]: updated_port = self._update_port( [ 728.846811] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 728.846811] env[61852]: _ensure_no_port_binding_failure(port) [ 728.846811] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 728.846811] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 728.847684] env[61852]: nova.exception.PortBindingFailed: Binding failed for port 8b029d4d-780a-46e9-8f80-1cccecf4f293, please check neutron logs for more information. [ 728.847684] env[61852]: Removing descriptor: 19 [ 728.921490] env[61852]: DEBUG nova.compute.manager [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 728.940349] env[61852]: DEBUG oslo_concurrency.lockutils [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.057s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 728.941405] env[61852]: ERROR nova.compute.manager [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 0e4e011d-e562-4d8b-ae0d-b0da4dbe80de, please check neutron logs for more information. [ 728.941405] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Traceback (most recent call last): [ 728.941405] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 728.941405] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] self.driver.spawn(context, instance, image_meta, [ 728.941405] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 728.941405] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 728.941405] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 728.941405] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] vm_ref = self.build_virtual_machine(instance, [ 728.941405] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 728.941405] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] vif_infos = vmwarevif.get_vif_info(self._session, [ 728.941405] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 728.941926] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] for vif in network_info: [ 728.941926] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 728.941926] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] return self._sync_wrapper(fn, *args, **kwargs) [ 728.941926] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 728.941926] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] self.wait() [ 728.941926] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 728.941926] env[61852]: ERROR 
nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] self[:] = self._gt.wait() [ 728.941926] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 728.941926] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] return self._exit_event.wait() [ 728.941926] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 728.941926] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] current.throw(*self._exc) [ 728.941926] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 728.941926] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] result = function(*args, **kwargs) [ 728.942347] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 728.942347] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] return func(*args, **kwargs) [ 728.942347] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 728.942347] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] raise e [ 728.942347] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 728.942347] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] nwinfo = self.network_api.allocate_for_instance( [ 728.942347] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 728.942347] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] created_port_ids = self._update_ports_for_instance( [ 728.942347] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 728.942347] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] with excutils.save_and_reraise_exception(): [ 728.942347] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 728.942347] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] self.force_reraise() [ 728.942347] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 728.942743] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] raise self.value [ 728.942743] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in 
_update_ports_for_instance [ 728.942743] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] updated_port = self._update_port( [ 728.942743] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 728.942743] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] _ensure_no_port_binding_failure(port) [ 728.942743] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 728.942743] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] raise exception.PortBindingFailed(port_id=port['id']) [ 728.942743] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] nova.exception.PortBindingFailed: Binding failed for port 0e4e011d-e562-4d8b-ae0d-b0da4dbe80de, please check neutron logs for more information. [ 728.942743] env[61852]: ERROR nova.compute.manager [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] [ 728.942743] env[61852]: DEBUG nova.compute.utils [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Binding failed for port 0e4e011d-e562-4d8b-ae0d-b0da4dbe80de, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 728.947024] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.103s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 728.948287] env[61852]: INFO nova.compute.claims [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 728.951716] env[61852]: DEBUG nova.compute.manager [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Build of instance d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb was re-scheduled: Binding failed for port 0e4e011d-e562-4d8b-ae0d-b0da4dbe80de, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 728.951853] env[61852]: DEBUG nova.compute.manager [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 728.952022] env[61852]: DEBUG oslo_concurrency.lockutils [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Acquiring lock "refresh_cache-d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 728.952177] env[61852]: DEBUG oslo_concurrency.lockutils [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Acquired lock "refresh_cache-d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.952341] env[61852]: DEBUG nova.network.neutron [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 728.963029] env[61852]: DEBUG nova.virt.hardware [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 728.963193] env[61852]: DEBUG nova.virt.hardware [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 728.963355] env[61852]: DEBUG nova.virt.hardware [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 728.963535] env[61852]: DEBUG nova.virt.hardware [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Flavor pref 0:0:0 
{{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 728.963671] env[61852]: DEBUG nova.virt.hardware [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 728.963818] env[61852]: DEBUG nova.virt.hardware [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 728.964036] env[61852]: DEBUG nova.virt.hardware [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 728.964201] env[61852]: DEBUG nova.virt.hardware [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 728.964509] env[61852]: DEBUG nova.virt.hardware [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 728.964509] env[61852]: DEBUG nova.virt.hardware [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 728.964678] env[61852]: DEBUG nova.virt.hardware [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 728.965552] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f1a330c-c59d-4747-8e81-6941482fa31e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.975074] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94a918fc-a9a6-4dff-94ee-3e0159c216f6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.996765] env[61852]: ERROR nova.compute.manager [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8b029d4d-780a-46e9-8f80-1cccecf4f293, please check neutron logs for 
more information. [ 728.996765] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Traceback (most recent call last): [ 728.996765] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 728.996765] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] yield resources [ 728.996765] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 728.996765] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] self.driver.spawn(context, instance, image_meta, [ 728.996765] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 728.996765] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 728.996765] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 728.996765] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] vm_ref = self.build_virtual_machine(instance, [ 728.996765] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 728.997941] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] vif_infos = vmwarevif.get_vif_info(self._session, [ 728.997941] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 728.997941] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] for vif in network_info: [ 728.997941] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 728.997941] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] return self._sync_wrapper(fn, *args, **kwargs) [ 728.997941] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 728.997941] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] self.wait() [ 728.997941] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 728.997941] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] self[:] = self._gt.wait() [ 728.997941] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 728.997941] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] return self._exit_event.wait() [ 728.997941] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 728.997941] 
env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] current.throw(*self._exc) [ 728.998629] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 728.998629] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] result = function(*args, **kwargs) [ 728.998629] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 728.998629] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] return func(*args, **kwargs) [ 728.998629] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 728.998629] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] raise e [ 728.998629] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 728.998629] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] nwinfo = self.network_api.allocate_for_instance( [ 728.998629] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 728.998629] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] created_port_ids = self._update_ports_for_instance( [ 728.998629] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 728.998629] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] with excutils.save_and_reraise_exception(): [ 728.998629] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 728.998992] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] self.force_reraise() [ 728.998992] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 728.998992] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] raise self.value [ 728.998992] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 728.998992] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] updated_port = self._update_port( [ 728.998992] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 728.998992] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] _ensure_no_port_binding_failure(port) [ 728.998992] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 728.998992] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] raise exception.PortBindingFailed(port_id=port['id']) [ 728.998992] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] nova.exception.PortBindingFailed: Binding failed for port 8b029d4d-780a-46e9-8f80-1cccecf4f293, please check neutron logs for more information. [ 728.998992] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] [ 728.998992] env[61852]: INFO nova.compute.manager [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Terminating instance [ 728.999874] env[61852]: DEBUG oslo_concurrency.lockutils [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Acquiring lock "refresh_cache-bd549d69-403b-4c5c-9e08-0c84d32a7c0a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 729.133718] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 729.170436] env[61852]: DEBUG nova.network.neutron [req-43a50816-7e35-4170-b322-6097680d9b17 req-07b28a0e-3c4e-420d-ab23-0859e77fbe71 service nova] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 729.251519] env[61852]: DEBUG nova.network.neutron [req-43a50816-7e35-4170-b322-6097680d9b17 req-07b28a0e-3c4e-420d-ab23-0859e77fbe71 service nova] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.475262] env[61852]: DEBUG nova.network.neutron [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Instance cache missing network info. 
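[editor's note] Note where the traceback above surfaces the failure: not when the port is allocated, but when the driver first iterates network_info (model.py __iter__ -> _sync_wrapper -> wait). Allocation runs in an eventlet greenthread, and the result object re-raises the greenthread's exception on first use. A simplified stand-in for that wrapper (AsyncNetworkInfo is a local name, not Nova's class):

```python
import eventlet

class AsyncNetworkInfo(list):
    """List-like result whose contents are computed in a greenthread."""
    def __init__(self, allocate, *args, **kwargs):
        super().__init__()
        self._gt = eventlet.spawn(allocate, *args, **kwargs)
        self._done = False

    def wait(self):
        if not self._done:
            # GreenThread.wait() returns the result or re-raises the
            # exception raised inside the greenthread; this is the
            # point where PortBindingFailed pops out above.
            self[:] = self._gt.wait()
            self._done = True
        return self

    def __iter__(self):
        self.wait()
        return super().__iter__()

def allocate_for_instance():
    # Stands in for the Neutron call that raised PortBindingFailed.
    raise RuntimeError('Binding failed for port ...')

nw_info = AsyncNetworkInfo(allocate_for_instance)
try:
    for vif in nw_info:      # the failure surfaces here, not at spawn time
        pass
except RuntimeError as e:
    print(e)
```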
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 729.499697] env[61852]: DEBUG oslo_concurrency.lockutils [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Acquiring lock "694889e8-200e-454c-9e87-60521dd044d9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 729.499966] env[61852]: DEBUG oslo_concurrency.lockutils [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Lock "694889e8-200e-454c-9e87-60521dd044d9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.500999] env[61852]: DEBUG oslo_concurrency.lockutils [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Acquiring lock "694889e8-200e-454c-9e87-60521dd044d9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 729.500999] env[61852]: DEBUG oslo_concurrency.lockutils [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Lock "694889e8-200e-454c-9e87-60521dd044d9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.500999] env[61852]: DEBUG oslo_concurrency.lockutils [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Lock "694889e8-200e-454c-9e87-60521dd044d9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.503227] env[61852]: INFO nova.compute.manager [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Terminating instance [ 729.504333] env[61852]: DEBUG oslo_concurrency.lockutils [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Acquiring lock "refresh_cache-694889e8-200e-454c-9e87-60521dd044d9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 729.505459] env[61852]: DEBUG oslo_concurrency.lockutils [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Acquired lock "refresh_cache-694889e8-200e-454c-9e87-60521dd044d9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.505459] env[61852]: DEBUG nova.network.neutron [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] 
[instance: 694889e8-200e-454c-9e87-60521dd044d9] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 729.555668] env[61852]: DEBUG nova.network.neutron [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.756140] env[61852]: DEBUG oslo_concurrency.lockutils [req-43a50816-7e35-4170-b322-6097680d9b17 req-07b28a0e-3c4e-420d-ab23-0859e77fbe71 service nova] Releasing lock "refresh_cache-bd549d69-403b-4c5c-9e08-0c84d32a7c0a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 729.756140] env[61852]: DEBUG oslo_concurrency.lockutils [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Acquired lock "refresh_cache-bd549d69-403b-4c5c-9e08-0c84d32a7c0a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.756140] env[61852]: DEBUG nova.network.neutron [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 730.022856] env[61852]: DEBUG nova.network.neutron [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 730.061647] env[61852]: DEBUG oslo_concurrency.lockutils [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Releasing lock "refresh_cache-d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 730.061756] env[61852]: DEBUG nova.compute.manager [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
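[editor's note] The "Virt driver does not provide unplug_vifs method" line reflects how optional driver operations are probed: the base driver raises NotImplementedError and callers fall back gracefully. A plausible shape for that probe, with ComputeDriver and cleanup_allocated_networks as local stand-ins rather than Nova's actual signatures:

```python
class ComputeDriver:
    """Base driver: optional operations raise NotImplementedError."""
    def unplug_vifs(self, instance, network_info):
        raise NotImplementedError()

def cleanup_allocated_networks(driver, instance, network_info):
    try:
        driver.unplug_vifs(instance, network_info)
    except NotImplementedError:
        # Mirrors the log line: without the hook we cannot tell whether
        # VIFs need unplugging, so cleanup just proceeds.
        print('Virt driver does not provide unplug_vifs method, so it is '
              'not possible to determine if VIFs should be unplugged.')

cleanup_allocated_networks(ComputeDriver(), 'instance', [])
```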
{{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 730.061965] env[61852]: DEBUG nova.compute.manager [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 730.062247] env[61852]: DEBUG nova.network.neutron [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 730.082574] env[61852]: DEBUG nova.network.neutron [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 730.086605] env[61852]: DEBUG nova.network.neutron [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.184356] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.184658] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.276923] env[61852]: DEBUG nova.network.neutron [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Instance cache missing network info. 
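[editor's note] The Acquiring/Acquired/Releasing lines around "refresh_cache-<uuid>" are oslo.concurrency named locks: every code path that rewrites one instance's network info cache contends on the same per-instance name. A minimal sketch of that pattern (the UUID here is a placeholder):

```python
import uuid
from oslo_concurrency import lockutils

instance_uuid = str(uuid.uuid4())

# lockutils.lock() is a context manager keyed by name; all writers of
# this instance's cache serialize on 'refresh_cache-<uuid>', matching
# the lock lines in the log above.
with lockutils.lock('refresh_cache-%s' % instance_uuid):
    pass  # read Neutron state and rewrite instance_info_cache here
```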
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 730.339849] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf7f6d08-eb6d-4b72-aa31-5bc44ba6937b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.347951] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3694781-3f25-4b68-925c-187068e9a58c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.353736] env[61852]: DEBUG nova.network.neutron [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.381183] env[61852]: DEBUG oslo_concurrency.lockutils [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Releasing lock "refresh_cache-bd549d69-403b-4c5c-9e08-0c84d32a7c0a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 730.381613] env[61852]: DEBUG nova.compute.manager [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 730.381798] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 730.382533] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7abe9630-d0e3-49a7-9069-0e7f31828cc2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.385984] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-28775ec2-2a1c-44ce-9f75-49210b339f8b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.392975] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-536d6091-64eb-496e-b72e-fa72e3380849 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.399594] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fa1d178-91e8-4e04-83be-51b1bc16d2d6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.418950] env[61852]: DEBUG nova.compute.provider_tree [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 
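[editor's note] "Inventory has not changed in ProviderTree" means the locally computed inventory matched what was last reported, so no update is pushed to placement. A minimal sketch of that comparison, using the inventory shape from this log:

```python
def changed_resource_classes(local, remote):
    """Return resource classes whose inventory fields differ.

    An update is pushed only when this is non-empty; otherwise the
    code logs "Inventory has not changed", as above.
    """
    classes = set(local) | set(remote)
    return sorted(rc for rc in classes if local.get(rc) != remote.get(rc))

reported = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
}
print(changed_resource_classes(reported, dict(reported)))  # [] -> no update
```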
{{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 730.424145] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bd549d69-403b-4c5c-9e08-0c84d32a7c0a could not be found. [ 730.424354] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 730.424529] env[61852]: INFO nova.compute.manager [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Took 0.04 seconds to destroy the instance on the hypervisor. [ 730.424777] env[61852]: DEBUG oslo.service.loopingcall [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 730.425193] env[61852]: DEBUG nova.compute.manager [-] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 730.425286] env[61852]: DEBUG nova.network.neutron [-] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 730.440291] env[61852]: DEBUG nova.network.neutron [-] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 730.586627] env[61852]: DEBUG nova.network.neutron [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.588573] env[61852]: DEBUG oslo_concurrency.lockutils [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Releasing lock "refresh_cache-694889e8-200e-454c-9e87-60521dd044d9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 730.588948] env[61852]: DEBUG nova.compute.manager [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Start destroying the instance on the hypervisor. 
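[editor's note] The "Waiting for function ..._deallocate_network_with_retries to return" line is oslo.service's looping-call machinery: network deallocation is retried on an interval until it succeeds or gives up. A minimal sketch under that assumption (the simulated-failure counter stands in for a real Neutron call that may raise):

```python
from oslo_service import loopingcall

def deallocate_network_with_retries():
    attempts = {'n': 0}

    def _try_once():
        attempts['n'] += 1
        if attempts['n'] < 3:
            return  # simulated failure: run again on the next interval
        # Success: stop the loop and hand a value back to the waiter.
        raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_try_once)
    return timer.start(interval=0.1).wait()

print(deallocate_network_with_retries())  # True after three attempts
```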
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 730.589161] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 730.590019] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22f1c7f3-a706-4e75-aa67-ca2aee6a1a52 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.597898] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 730.598769] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-86a61c77-0eda-4263-8a59-810edee457ac {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.605073] env[61852]: DEBUG oslo_vmware.api [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Waiting for the task: (returnval){ [ 730.605073] env[61852]: value = "task-1292726" [ 730.605073] env[61852]: _type = "Task" [ 730.605073] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.612646] env[61852]: DEBUG oslo_vmware.api [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292726, 'name': PowerOffVM_Task} progress is 0%. 
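[editor's note] The "Waiting for the task ... progress is 0%" exchange is the classic vCenter task-polling loop: submit an asynchronous task (here PowerOffVM_Task), then poll its TaskInfo until it reaches success or error. A generic sketch; get_task_info and FakeSession are local stand-ins, not oslo.vmware's API:

```python
import time
from types import SimpleNamespace

def wait_for_task(session, task_ref, poll_interval=0.1):
    """Poll a task's state until it succeeds or errors."""
    while True:
        info = session.get_task_info(task_ref)
        if info.state == 'success':
            return info.result
        if info.state == 'error':
            raise RuntimeError(info.error)
        # 'queued' / 'running': report progress and try again.
        print(f"Task: {task_ref} progress is {info.progress or 0}%.")
        time.sleep(poll_interval)

class FakeSession:
    def __init__(self):
        self._states = iter(['running', 'success'])
    def get_task_info(self, task_ref):
        return SimpleNamespace(state=next(self._states), progress=0,
                               result='done', error=None)

print(wait_for_task(FakeSession(), 'task-1292726'))
```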
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.678276] env[61852]: DEBUG nova.compute.manager [req-2583c50e-33ec-4b16-a974-0ca7b609c9c1 req-19991155-a1a1-4a56-a7d9-0bf928253a6a service nova] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Received event network-vif-deleted-8b029d4d-780a-46e9-8f80-1cccecf4f293 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 730.926566] env[61852]: DEBUG nova.scheduler.client.report [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 730.943196] env[61852]: DEBUG nova.network.neutron [-] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.089334] env[61852]: INFO nova.compute.manager [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] [instance: d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb] Took 1.03 seconds to deallocate network for instance. [ 731.116737] env[61852]: DEBUG oslo_vmware.api [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292726, 'name': PowerOffVM_Task, 'duration_secs': 0.144569} completed successfully. 
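[editor's note] Lines like "Received event network-vif-deleted-8b029d4d-..." are external instance events pushed from Neutron and routed by name inside the compute manager. A toy dispatcher showing the routing idea; the Event tuple and handlers are hypothetical simplifications:

```python
from collections import namedtuple

Event = namedtuple('Event', ['name', 'instance_uuid', 'tag'])

def handle_external_instance_event(event):
    if event.name == 'network-vif-deleted':
        print(f'drop VIF {event.tag} from cache of {event.instance_uuid}')
    elif event.name == 'network-changed':
        print(f'refresh network info cache for port {event.tag}')
    else:
        print(f'ignoring unknown event {event.name}')

handle_external_instance_event(
    Event('network-vif-deleted',
          'bd549d69-403b-4c5c-9e08-0c84d32a7c0a',
          '8b029d4d-780a-46e9-8f80-1cccecf4f293'))
```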
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.117017] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 731.117224] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 731.117482] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e03b30ac-0e7d-49ce-9040-264169c3ac6a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.141951] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 731.142190] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Deleting contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 731.142372] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Deleting the datastore file [datastore2] 694889e8-200e-454c-9e87-60521dd044d9 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 731.142625] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5dffde5e-47f0-4c94-bcb6-a40529ccf65e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.148679] env[61852]: DEBUG oslo_vmware.api [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Waiting for the task: (returnval){ [ 731.148679] env[61852]: value = "task-1292728" [ 731.148679] env[61852]: _type = "Task" [ 731.148679] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.156326] env[61852]: DEBUG oslo_vmware.api [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292728, 'name': DeleteDatastoreFile_Task} progress is 0%. 
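[editor's note] The destroy path above runs three vSphere tasks in a fixed order: power off, unregister, then delete the datastore directory. The ordering matters because a registered, running VM holds its files. A stand-in sketch (FakeVSphereSession is hypothetical, each method representing the corresponding task from the log):

```python
class FakeVSphereSession:
    def power_off(self, vm_ref):
        print(f'PowerOffVM_Task on {vm_ref}')
    def unregister(self, vm_ref):
        print(f'UnregisterVM on {vm_ref}')
    def delete_datastore_file(self, path):
        print(f'DeleteDatastoreFile_Task on {path}')

def destroy_instance(session, vm_ref, datastore_path):
    session.power_off(vm_ref)                      # stop the guest first
    session.unregister(vm_ref)                     # drop it from inventory
    session.delete_datastore_file(datastore_path)  # then reclaim the disk

destroy_instance(FakeVSphereSession(), 'vm-123',
                 '[datastore2] 694889e8-200e-454c-9e87-60521dd044d9')
```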
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.433297] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.487s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.433833] env[61852]: DEBUG nova.compute.manager [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 731.436580] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.623s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.448187] env[61852]: INFO nova.compute.manager [-] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Took 1.02 seconds to deallocate network for instance. [ 731.449393] env[61852]: DEBUG nova.compute.claims [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 731.449519] env[61852]: DEBUG oslo_concurrency.lockutils [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.659060] env[61852]: DEBUG oslo_vmware.api [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Task: {'id': task-1292728, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.19942} completed successfully. 
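[editor's note] The "Aborting claim" line pairs with the earlier "Claim successful": a resource claim taken under the "compute_resources" lock is rolled back under the same lock when the build fails. A toy version of that claim/abort pattern (Tracker and the memory accounting are simplified stand-ins):

```python
import threading

class Tracker:
    """Toy resource tracker guarded by one 'compute_resources' lock."""
    def __init__(self):
        self.lock = threading.Lock()
        self.used_mb = 0

class Claim:
    def __init__(self, tracker, memory_mb):
        self.tracker = tracker
        self.memory_mb = memory_mb
        with tracker.lock:                 # instance_claim in the logs
            tracker.used_mb += memory_mb

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc, tb):
        if exc_type is not None:
            self.abort()                   # "Aborting claim" on failure
        return False                       # never swallow the build error

    def abort(self):
        with self.tracker.lock:            # abort_instance_claim in the logs
            self.tracker.used_mb -= self.memory_mb

tracker = Tracker()
try:
    with Claim(tracker, 192):
        raise RuntimeError('Binding failed for port ...')
except RuntimeError:
    pass
print(tracker.used_mb)  # 0: the claim was rolled back
```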
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.659570] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 731.659882] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Deleted contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 731.660202] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 731.660502] env[61852]: INFO nova.compute.manager [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Took 1.07 seconds to destroy the instance on the hypervisor. [ 731.660875] env[61852]: DEBUG oslo.service.loopingcall [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 731.661465] env[61852]: DEBUG nova.compute.manager [-] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 731.661688] env[61852]: DEBUG nova.network.neutron [-] [instance: 694889e8-200e-454c-9e87-60521dd044d9] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 731.677274] env[61852]: DEBUG nova.network.neutron [-] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 731.941280] env[61852]: DEBUG nova.compute.utils [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 731.945377] env[61852]: DEBUG nova.compute.manager [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Allocating IP information in the background. 
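[editor's note] "Using /dev/sd instead of None" shows block-device naming defaulting: when a request carries no device name, the code falls back to the /dev/sd prefix and picks the next free letter. A simplified sketch of that selection (Nova's real helper handles more prefixes and edge cases):

```python
import string

def get_next_device_name(existing, prefix=None):
    """Pick the next free disk device name, defaulting the prefix."""
    prefix = prefix or '/dev/sd'     # "Using /dev/sd instead of None"
    for letter in string.ascii_lowercase:
        candidate = prefix + letter
        if candidate not in existing:
            return candidate
    raise ValueError('no free device names left')

print(get_next_device_name({'/dev/sda'}))  # -> /dev/sdb
```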
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 731.945622] env[61852]: DEBUG nova.network.neutron [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 731.984265] env[61852]: DEBUG nova.policy [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5e46b44ff06d4ff2b0670ef17b3ef6e3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ab258d1a24f3459d95421bcb84287f85', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 732.119949] env[61852]: INFO nova.scheduler.client.report [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Deleted allocations for instance d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb [ 732.180270] env[61852]: DEBUG nova.network.neutron [-] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.256067] env[61852]: DEBUG nova.network.neutron [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Successfully created port: c3c54f6f-e903-4008-8261-667cea9cbc6e {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 732.281402] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce5f9235-d0f3-4a08-91e2-3daa06fe949d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.289105] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d59dba79-38ff-424f-8768-ac8a794d1f5f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.320564] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0649f9fa-8da7-4e1f-8360-eeee832e37a8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.328610] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9644933a-0876-4e79-baa1-bf553ad49a3c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.342144] env[61852]: DEBUG nova.compute.provider_tree [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 732.449034] env[61852]: DEBUG nova.compute.manager [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 732.631319] env[61852]: DEBUG oslo_concurrency.lockutils [None req-50d9682e-a51d-4742-94a9-a63326dd46f1 tempest-ServerActionsTestOtherA-2083424245 tempest-ServerActionsTestOtherA-2083424245-project-member] Lock "d455fa51-f38f-4bb7-ba28-cf03c2ce3dbb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 123.801s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.683331] env[61852]: INFO nova.compute.manager [-] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Took 1.02 seconds to deallocate network for instance. [ 732.846289] env[61852]: DEBUG nova.scheduler.client.report [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 732.930946] env[61852]: DEBUG nova.compute.manager [req-51617d8e-20c7-46d9-bdab-03106d47ea1b req-c8598945-e229-45f4-9ce6-7777202bf76b service nova] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Received event network-changed-c3c54f6f-e903-4008-8261-667cea9cbc6e {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 732.930946] env[61852]: DEBUG nova.compute.manager [req-51617d8e-20c7-46d9-bdab-03106d47ea1b req-c8598945-e229-45f4-9ce6-7777202bf76b service nova] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Refreshing instance network info cache due to event network-changed-c3c54f6f-e903-4008-8261-667cea9cbc6e. 
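[editor's note] The nova.policy line above shows a role-based check: the caller's credentials carry roles ['reader', 'member'], which do not satisfy network:attach_external_network, so the check fails (harmlessly, since the port is created on a normal network). Real Nova delegates to oslo.policy; this toy check only illustrates the role intersection:

```python
def check_policy(rules, action, creds):
    """Toy check: a rule is the set of roles allowed to do the action."""
    allowed_roles = rules.get(action, set())
    return bool(allowed_roles & set(creds.get('roles', [])))

rules = {'network:attach_external_network': {'admin'}}
creds = {'roles': ['reader', 'member'], 'is_admin': False}
print(check_policy(rules, 'network:attach_external_network', creds))  # False
```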
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 732.930946] env[61852]: DEBUG oslo_concurrency.lockutils [req-51617d8e-20c7-46d9-bdab-03106d47ea1b req-c8598945-e229-45f4-9ce6-7777202bf76b service nova] Acquiring lock "refresh_cache-aae42775-cb43-4eee-967a-9ba0bdde7783" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 732.930946] env[61852]: DEBUG oslo_concurrency.lockutils [req-51617d8e-20c7-46d9-bdab-03106d47ea1b req-c8598945-e229-45f4-9ce6-7777202bf76b service nova] Acquired lock "refresh_cache-aae42775-cb43-4eee-967a-9ba0bdde7783" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.932067] env[61852]: DEBUG nova.network.neutron [req-51617d8e-20c7-46d9-bdab-03106d47ea1b req-c8598945-e229-45f4-9ce6-7777202bf76b service nova] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Refreshing network info cache for port c3c54f6f-e903-4008-8261-667cea9cbc6e {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 733.087418] env[61852]: ERROR nova.compute.manager [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c3c54f6f-e903-4008-8261-667cea9cbc6e, please check neutron logs for more information. [ 733.087418] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 733.087418] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 733.087418] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 733.087418] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 733.087418] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 733.087418] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 733.087418] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 733.087418] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 733.087418] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 733.087418] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 733.087418] env[61852]: ERROR nova.compute.manager raise self.value [ 733.087418] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 733.087418] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 733.087418] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 733.087418] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 733.088131] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 733.088131] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 733.088131] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: 
Binding failed for port c3c54f6f-e903-4008-8261-667cea9cbc6e, please check neutron logs for more information. [ 733.088131] env[61852]: ERROR nova.compute.manager [ 733.088131] env[61852]: Traceback (most recent call last): [ 733.088131] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 733.088131] env[61852]: listener.cb(fileno) [ 733.088131] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 733.088131] env[61852]: result = function(*args, **kwargs) [ 733.088131] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 733.088131] env[61852]: return func(*args, **kwargs) [ 733.088131] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 733.088131] env[61852]: raise e [ 733.088131] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 733.088131] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 733.088131] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 733.088131] env[61852]: created_port_ids = self._update_ports_for_instance( [ 733.088131] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 733.088131] env[61852]: with excutils.save_and_reraise_exception(): [ 733.088131] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 733.088131] env[61852]: self.force_reraise() [ 733.088131] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 733.088131] env[61852]: raise self.value [ 733.088131] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 733.088131] env[61852]: updated_port = self._update_port( [ 733.088131] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 733.088131] env[61852]: _ensure_no_port_binding_failure(port) [ 733.088131] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 733.088131] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 733.089511] env[61852]: nova.exception.PortBindingFailed: Binding failed for port c3c54f6f-e903-4008-8261-667cea9cbc6e, please check neutron logs for more information. [ 733.089511] env[61852]: Removing descriptor: 19 [ 733.131862] env[61852]: DEBUG nova.compute.manager [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Starting instance... 
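[editor's note] The excutils frames in these tracebacks (neutron.py:1414) are the save_and_reraise_exception pattern: run cleanup while an exception is in flight, then re-raise the original with its traceback intact. A minimal sketch; update_port and the rollback print are local stand-ins for the real port update and cleanup:

```python
from oslo_utils import excutils

def update_port(port):
    raise RuntimeError(f'Binding failed for port {port}')

def update_ports_for_instance(ports):
    created = []
    for port in ports:
        try:
            created.append(update_port(port))
        except Exception:
            # Roll back any ports already created, then re-raise the
            # original exception, exactly the frames seen above.
            with excutils.save_and_reraise_exception():
                for p in created:
                    print(f'rolling back port {p}')
    return created

try:
    update_ports_for_instance(['c3c54f6f-e903-4008-8261-667cea9cbc6e'])
except RuntimeError as e:
    print(e)
```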
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 733.190452] env[61852]: DEBUG oslo_concurrency.lockutils [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 733.355826] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.919s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 733.357055] env[61852]: ERROR nova.compute.manager [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8025adf1-c695-4094-84cc-7d345f318195, please check neutron logs for more information. [ 733.357055] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Traceback (most recent call last): [ 733.357055] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 733.357055] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] self.driver.spawn(context, instance, image_meta, [ 733.357055] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 733.357055] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 733.357055] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 733.357055] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] vm_ref = self.build_virtual_machine(instance, [ 733.357055] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 733.357055] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] vif_infos = vmwarevif.get_vif_info(self._session, [ 733.357055] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 733.357455] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] for vif in network_info: [ 733.357455] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 733.357455] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] return self._sync_wrapper(fn, *args, **kwargs) [ 733.357455] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File 
"/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 733.357455] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] self.wait() [ 733.357455] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 733.357455] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] self[:] = self._gt.wait() [ 733.357455] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 733.357455] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] return self._exit_event.wait() [ 733.357455] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 733.357455] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] result = hub.switch() [ 733.357455] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 733.357455] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] return self.greenlet.switch() [ 733.357817] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 733.357817] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] result = function(*args, **kwargs) [ 733.357817] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 733.357817] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] return func(*args, **kwargs) [ 733.357817] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 733.357817] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] raise e [ 733.357817] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 733.357817] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] nwinfo = self.network_api.allocate_for_instance( [ 733.357817] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 733.357817] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] created_port_ids = self._update_ports_for_instance( [ 733.357817] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 733.357817] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] with excutils.save_and_reraise_exception(): [ 733.357817] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 733.358226] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] self.force_reraise() [ 733.358226] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 733.358226] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] raise self.value [ 733.358226] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 733.358226] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] updated_port = self._update_port( [ 733.358226] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 733.358226] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] _ensure_no_port_binding_failure(port) [ 733.358226] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 733.358226] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] raise exception.PortBindingFailed(port_id=port['id']) [ 733.358226] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] nova.exception.PortBindingFailed: Binding failed for port 8025adf1-c695-4094-84cc-7d345f318195, please check neutron logs for more information. [ 733.358226] env[61852]: ERROR nova.compute.manager [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] [ 733.358580] env[61852]: DEBUG nova.compute.utils [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Binding failed for port 8025adf1-c695-4094-84cc-7d345f318195, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 733.358580] env[61852]: DEBUG oslo_concurrency.lockutils [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.174s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 733.359836] env[61852]: INFO nova.compute.claims [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 733.362460] env[61852]: DEBUG nova.compute.manager [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Build of instance a77ddc8b-f3b2-4e13-944d-5cafecf59fae was re-scheduled: Binding failed for port 8025adf1-c695-4094-84cc-7d345f318195, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 733.363258] env[61852]: DEBUG nova.compute.manager [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 733.363258] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "refresh_cache-a77ddc8b-f3b2-4e13-944d-5cafecf59fae" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 733.363410] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquired lock "refresh_cache-a77ddc8b-f3b2-4e13-944d-5cafecf59fae" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.363589] env[61852]: DEBUG nova.network.neutron [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 733.451919] env[61852]: DEBUG nova.network.neutron [req-51617d8e-20c7-46d9-bdab-03106d47ea1b req-c8598945-e229-45f4-9ce6-7777202bf76b service nova] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 733.457160] env[61852]: DEBUG nova.compute.manager [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 733.481824] env[61852]: DEBUG nova.virt.hardware [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 733.481824] env[61852]: DEBUG nova.virt.hardware [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 733.481824] env[61852]: DEBUG nova.virt.hardware [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 733.481996] env[61852]: DEBUG nova.virt.hardware [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 733.481996] env[61852]: DEBUG nova.virt.hardware [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 733.481996] env[61852]: DEBUG nova.virt.hardware [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 733.482207] env[61852]: DEBUG nova.virt.hardware [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 733.482391] env[61852]: DEBUG nova.virt.hardware [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 733.482555] env[61852]: DEBUG 
nova.virt.hardware [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 733.482715] env[61852]: DEBUG nova.virt.hardware [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 733.482880] env[61852]: DEBUG nova.virt.hardware [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 733.483797] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecbeebe6-2084-45a1-b6e8-3411800b3028 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.493672] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4a280bc-148a-418b-8e60-59a17c0ca9e2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.507415] env[61852]: ERROR nova.compute.manager [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c3c54f6f-e903-4008-8261-667cea9cbc6e, please check neutron logs for more information. 
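Every failed build in this section bottoms out in the same frame: nova/network/neutron.py:294, _ensure_no_port_binding_failure, visible in the tracebacks both above and below. A minimal, self-contained sketch of that guard, reconstructed from the traceback (the 'binding:vif_type' key and the 'binding_failed' sentinel are the standard Neutron port-binding attributes such a check inspects; treat the body as an approximation, not a verbatim copy of the Nova source):

    class PortBindingFailed(Exception):
        # Stand-in for nova.exception.PortBindingFailed.
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # Neutron's failure marker

    def ensure_no_port_binding_failure(port):
        # Neutron leaves binding:vif_type at 'binding_failed' when no
        # mechanism driver could bind the port on the target host; Nova
        # converts that into the PortBindingFailed seen throughout this log.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

Because the raise happens inside _update_ports_for_instance's save_and_reraise_exception() block, the traceback that follows always shows force_reraise() re-raising the same value before the exception surfaces in _allocate_network_async.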
[ 733.507415] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Traceback (most recent call last): [ 733.507415] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 733.507415] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] yield resources [ 733.507415] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 733.507415] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] self.driver.spawn(context, instance, image_meta, [ 733.507415] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 733.507415] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] self._vmops.spawn(context, instance, image_meta, injected_files, [ 733.507415] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 733.507415] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] vm_ref = self.build_virtual_machine(instance, [ 733.507415] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 733.507811] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] vif_infos = vmwarevif.get_vif_info(self._session, [ 733.507811] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 733.507811] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] for vif in network_info: [ 733.507811] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 733.507811] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] return self._sync_wrapper(fn, *args, **kwargs) [ 733.507811] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 733.507811] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] self.wait() [ 733.507811] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 733.507811] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] self[:] = self._gt.wait() [ 733.507811] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 733.507811] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] return self._exit_event.wait() [ 733.507811] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 733.507811] env[61852]: ERROR 
nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] current.throw(*self._exc) [ 733.508362] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 733.508362] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] result = function(*args, **kwargs) [ 733.508362] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 733.508362] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] return func(*args, **kwargs) [ 733.508362] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 733.508362] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] raise e [ 733.508362] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 733.508362] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] nwinfo = self.network_api.allocate_for_instance( [ 733.508362] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 733.508362] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] created_port_ids = self._update_ports_for_instance( [ 733.508362] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 733.508362] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] with excutils.save_and_reraise_exception(): [ 733.508362] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 733.508803] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] self.force_reraise() [ 733.508803] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 733.508803] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] raise self.value [ 733.508803] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 733.508803] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] updated_port = self._update_port( [ 733.508803] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 733.508803] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] _ensure_no_port_binding_failure(port) [ 733.508803] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
733.508803] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] raise exception.PortBindingFailed(port_id=port['id']) [ 733.508803] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] nova.exception.PortBindingFailed: Binding failed for port c3c54f6f-e903-4008-8261-667cea9cbc6e, please check neutron logs for more information. [ 733.508803] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] [ 733.508803] env[61852]: INFO nova.compute.manager [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Terminating instance [ 733.511330] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Acquiring lock "refresh_cache-aae42775-cb43-4eee-967a-9ba0bdde7783" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 733.545704] env[61852]: DEBUG nova.network.neutron [req-51617d8e-20c7-46d9-bdab-03106d47ea1b req-c8598945-e229-45f4-9ce6-7777202bf76b service nova] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.653929] env[61852]: DEBUG oslo_concurrency.lockutils [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 733.885976] env[61852]: DEBUG nova.network.neutron [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 733.971857] env[61852]: DEBUG nova.network.neutron [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.048579] env[61852]: DEBUG oslo_concurrency.lockutils [req-51617d8e-20c7-46d9-bdab-03106d47ea1b req-c8598945-e229-45f4-9ce6-7777202bf76b service nova] Releasing lock "refresh_cache-aae42775-cb43-4eee-967a-9ba0bdde7783" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.048993] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Acquired lock "refresh_cache-aae42775-cb43-4eee-967a-9ba0bdde7783" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.049199] env[61852]: DEBUG nova.network.neutron [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 734.478885] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Releasing lock "refresh_cache-a77ddc8b-f3b2-4e13-944d-5cafecf59fae" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.478885] env[61852]: DEBUG nova.compute.manager [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 734.478885] env[61852]: DEBUG nova.compute.manager [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 734.478885] env[61852]: DEBUG nova.network.neutron [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 734.509286] env[61852]: DEBUG nova.network.neutron [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 734.569225] env[61852]: DEBUG nova.network.neutron [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 734.700434] env[61852]: DEBUG nova.network.neutron [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.725966] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-373447ff-9791-459e-9687-84676979aaf0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.733934] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3419535c-7c16-44fb-84e6-a409b4f2595f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.764995] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c69a174-5058-4c9b-bf56-8ca2a30f1cc0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.772069] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-736c0612-3aec-4a56-a34a-c33c57d9dd2e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.785994] env[61852]: DEBUG nova.compute.provider_tree [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 734.962694] env[61852]: DEBUG nova.compute.manager [req-d9a0763e-9344-47f1-8ec4-bee9ef4750a3 req-91dc3e36-bbb9-41ad-9b1f-763d0f4f3a7a service nova] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Received event network-vif-deleted-c3c54f6f-e903-4008-8261-667cea9cbc6e {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 735.016122] env[61852]: DEBUG nova.network.neutron [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.203397] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Releasing lock "refresh_cache-aae42775-cb43-4eee-967a-9ba0bdde7783" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 735.205172] env[61852]: DEBUG nova.compute.manager [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e 
tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 735.205292] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 735.205657] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-000267f9-987d-41b8-84ff-2b0372c18885 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.215733] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9b7e9b7-09b4-400a-9998-035d4d6aa307 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.238344] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance aae42775-cb43-4eee-967a-9ba0bdde7783 could not be found. [ 735.238599] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 735.238847] env[61852]: INFO nova.compute.manager [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Took 0.03 seconds to destroy the instance on the hypervisor. [ 735.239112] env[61852]: DEBUG oslo.service.loopingcall [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 735.239329] env[61852]: DEBUG nova.compute.manager [-] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 735.239421] env[61852]: DEBUG nova.network.neutron [-] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 735.256740] env[61852]: DEBUG nova.network.neutron [-] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 735.289208] env[61852]: DEBUG nova.scheduler.client.report [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 735.518718] env[61852]: INFO nova.compute.manager [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: a77ddc8b-f3b2-4e13-944d-5cafecf59fae] Took 1.04 seconds to deallocate network for instance. [ 735.759194] env[61852]: DEBUG nova.network.neutron [-] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.794855] env[61852]: DEBUG oslo_concurrency.lockutils [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.436s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.796153] env[61852]: DEBUG oslo_concurrency.lockutils [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.597s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 735.798905] env[61852]: INFO nova.compute.claims [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 736.262500] env[61852]: INFO nova.compute.manager [-] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Took 1.02 seconds to deallocate network for instance. 
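The inventory dict in the report.py:954 record above is what bounds admission on this node. Assuming Placement's usual capacity rule, consumable capacity per resource class is (total - reserved) * allocation_ratio, while max_unit caps any single allocation; a quick check against the logged values (formula assumed, numbers copied from the log):

    # Inventory as logged for provider f818062c-7b17-4bd0-94af-192a674543c3.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        # Assumed Placement capacity formula: (total - reserved) * ratio.
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0

So the 4.0 VCPU overcommit turns 48 host cores into 192 consumable VCPUs, which is why the m1.nano claims in this section keep succeeding even while the builds themselves fail at network allocation.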
[ 736.264450] env[61852]: DEBUG nova.compute.claims [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 736.264625] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.303547] env[61852]: DEBUG oslo_concurrency.lockutils [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Acquiring lock "6d382b25-941e-44e0-986d-8934d600a159" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.303784] env[61852]: DEBUG oslo_concurrency.lockutils [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Lock "6d382b25-941e-44e0-986d-8934d600a159" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 736.549931] env[61852]: INFO nova.scheduler.client.report [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Deleted allocations for instance a77ddc8b-f3b2-4e13-944d-5cafecf59fae [ 736.807863] env[61852]: DEBUG oslo_concurrency.lockutils [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Lock "6d382b25-941e-44e0-986d-8934d600a159" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" :: held 0.504s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 736.808410] env[61852]: DEBUG nova.compute.manager [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 737.061338] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b0b567c6-bea1-4625-a34d-145acd654a91 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "a77ddc8b-f3b2-4e13-944d-5cafecf59fae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 128.123s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.103363] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec43fca-f90d-4e63-9e9b-6e75d7c9da88 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.112229] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-090cd1b2-1101-40e2-a5cf-471ef6f5511c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.140863] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c2154c0-ff26-4162-99c6-e21b655b2fa5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.148276] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36db6d21-6597-4cb4-9560-ebc47782db93 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.161117] env[61852]: DEBUG nova.compute.provider_tree [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 737.315815] env[61852]: DEBUG nova.compute.utils [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 737.317535] env[61852]: DEBUG nova.compute.manager [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 737.317702] env[61852]: DEBUG nova.network.neutron [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 737.362810] env[61852]: DEBUG nova.policy [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '83553b766d7e40f69e173fdf2808d710', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e2f6634eed9148288f456c1b86870cbc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 737.567344] env[61852]: DEBUG nova.compute.manager [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 737.608797] env[61852]: DEBUG nova.network.neutron [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Successfully created port: 14fb3d6f-2aea-4010-9c16-2afe3df02850 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 737.666615] env[61852]: DEBUG nova.scheduler.client.report [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 737.820574] env[61852]: DEBUG nova.compute.manager [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 738.095695] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.169371] env[61852]: DEBUG oslo_concurrency.lockutils [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.373s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.169663] env[61852]: DEBUG nova.compute.manager [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 738.172548] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.947s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.174029] env[61852]: INFO nova.compute.claims [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 738.445730] env[61852]: DEBUG nova.compute.manager [req-1c881fe2-a5da-4d92-a1db-1d6030d327c0 req-276082e2-0a89-4668-95c8-3ce70d0e5d62 service nova] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Received event network-changed-14fb3d6f-2aea-4010-9c16-2afe3df02850 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 738.445929] env[61852]: DEBUG nova.compute.manager [req-1c881fe2-a5da-4d92-a1db-1d6030d327c0 req-276082e2-0a89-4668-95c8-3ce70d0e5d62 service nova] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Refreshing instance network info cache due to event network-changed-14fb3d6f-2aea-4010-9c16-2afe3df02850. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 738.446167] env[61852]: DEBUG oslo_concurrency.lockutils [req-1c881fe2-a5da-4d92-a1db-1d6030d327c0 req-276082e2-0a89-4668-95c8-3ce70d0e5d62 service nova] Acquiring lock "refresh_cache-b0e0fcf9-1630-49aa-b053-5498245313b0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 738.446313] env[61852]: DEBUG oslo_concurrency.lockutils [req-1c881fe2-a5da-4d92-a1db-1d6030d327c0 req-276082e2-0a89-4668-95c8-3ce70d0e5d62 service nova] Acquired lock "refresh_cache-b0e0fcf9-1630-49aa-b053-5498245313b0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.446471] env[61852]: DEBUG nova.network.neutron [req-1c881fe2-a5da-4d92-a1db-1d6030d327c0 req-276082e2-0a89-4668-95c8-3ce70d0e5d62 service nova] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Refreshing network info cache for port 14fb3d6f-2aea-4010-9c16-2afe3df02850 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 738.485710] env[61852]: ERROR nova.compute.manager [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 14fb3d6f-2aea-4010-9c16-2afe3df02850, please check neutron logs for more information. [ 738.485710] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 738.485710] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 738.485710] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 738.485710] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 738.485710] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 738.485710] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 738.485710] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 738.485710] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 738.485710] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 738.485710] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 738.485710] env[61852]: ERROR nova.compute.manager raise self.value [ 738.485710] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 738.485710] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 738.485710] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 738.485710] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 738.486489] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 738.486489] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 738.486489] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding 
failed for port 14fb3d6f-2aea-4010-9c16-2afe3df02850, please check neutron logs for more information. [ 738.486489] env[61852]: ERROR nova.compute.manager [ 738.486489] env[61852]: Traceback (most recent call last): [ 738.486489] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 738.486489] env[61852]: listener.cb(fileno) [ 738.486489] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 738.486489] env[61852]: result = function(*args, **kwargs) [ 738.486489] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 738.486489] env[61852]: return func(*args, **kwargs) [ 738.486489] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 738.486489] env[61852]: raise e [ 738.486489] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 738.486489] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 738.486489] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 738.486489] env[61852]: created_port_ids = self._update_ports_for_instance( [ 738.486489] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 738.486489] env[61852]: with excutils.save_and_reraise_exception(): [ 738.486489] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 738.486489] env[61852]: self.force_reraise() [ 738.486489] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 738.486489] env[61852]: raise self.value [ 738.486489] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 738.486489] env[61852]: updated_port = self._update_port( [ 738.486489] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 738.486489] env[61852]: _ensure_no_port_binding_failure(port) [ 738.486489] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 738.486489] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 738.487748] env[61852]: nova.exception.PortBindingFailed: Binding failed for port 14fb3d6f-2aea-4010-9c16-2afe3df02850, please check neutron logs for more information. [ 738.487748] env[61852]: Removing descriptor: 19 [ 738.678867] env[61852]: DEBUG nova.compute.utils [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 738.683062] env[61852]: DEBUG nova.compute.manager [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 738.683062] env[61852]: DEBUG nova.network.neutron [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 738.729379] env[61852]: DEBUG nova.policy [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ea9997671cc74dbba868d59f36d92dc1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '87cfe1904375437c8ecf3da50b573d7f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 738.829383] env[61852]: DEBUG nova.compute.manager [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 738.861838] env[61852]: DEBUG nova.virt.hardware [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 738.861838] env[61852]: DEBUG nova.virt.hardware [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 738.861838] env[61852]: DEBUG nova.virt.hardware [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 738.861838] env[61852]: DEBUG nova.virt.hardware [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 738.862135] 
env[61852]: DEBUG nova.virt.hardware [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 738.862179] env[61852]: DEBUG nova.virt.hardware [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 738.862548] env[61852]: DEBUG nova.virt.hardware [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 738.864577] env[61852]: DEBUG nova.virt.hardware [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 738.864577] env[61852]: DEBUG nova.virt.hardware [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 738.864577] env[61852]: DEBUG nova.virt.hardware [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 738.864577] env[61852]: DEBUG nova.virt.hardware [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 738.864577] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b144de2-8620-4fc2-a856-1e70e56b7ca6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.875025] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5af7c2f-3485-4afd-804d-420e92a89422 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.888957] env[61852]: ERROR nova.compute.manager [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 14fb3d6f-2aea-4010-9c16-2afe3df02850, please check neutron logs for more information. 
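
The traceback below (like the standalone copy above it) bottoms out in nova/network/neutron.py:294, _ensure_no_port_binding_failure. A minimal sketch of what that check does, assuming Neutron's standard binding:vif_type port attribute; the stand-in exception class here is illustrative, not Nova's real nova.exception module:

    class PortBindingFailed(Exception):
        # Stand-in for nova.exception.PortBindingFailed.
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, please "
                             "check neutron logs for more information.")

    def ensure_no_port_binding_failure(port: dict) -> None:
        # Neutron marks a port it could not bind with
        # binding:vif_type == 'binding_failed'; Nova converts that into
        # the PortBindingFailed exception seen throughout this log.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])
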
[ 738.888957] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Traceback (most recent call last): [ 738.888957] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 738.888957] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] yield resources [ 738.888957] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 738.888957] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] self.driver.spawn(context, instance, image_meta, [ 738.888957] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 738.888957] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 738.888957] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 738.888957] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] vm_ref = self.build_virtual_machine(instance, [ 738.888957] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 738.889604] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] vif_infos = vmwarevif.get_vif_info(self._session, [ 738.889604] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 738.889604] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] for vif in network_info: [ 738.889604] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 738.889604] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] return self._sync_wrapper(fn, *args, **kwargs) [ 738.889604] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 738.889604] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] self.wait() [ 738.889604] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 738.889604] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] self[:] = self._gt.wait() [ 738.889604] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 738.889604] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] return self._exit_event.wait() [ 738.889604] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 738.889604] env[61852]: ERROR 
nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] current.throw(*self._exc) [ 738.890161] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 738.890161] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] result = function(*args, **kwargs) [ 738.890161] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 738.890161] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] return func(*args, **kwargs) [ 738.890161] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 738.890161] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] raise e [ 738.890161] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 738.890161] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] nwinfo = self.network_api.allocate_for_instance( [ 738.890161] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 738.890161] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] created_port_ids = self._update_ports_for_instance( [ 738.890161] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 738.890161] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] with excutils.save_and_reraise_exception(): [ 738.890161] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 738.890595] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] self.force_reraise() [ 738.890595] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 738.890595] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] raise self.value [ 738.890595] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 738.890595] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] updated_port = self._update_port( [ 738.890595] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 738.890595] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] _ensure_no_port_binding_failure(port) [ 738.890595] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
738.890595] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] raise exception.PortBindingFailed(port_id=port['id']) [ 738.890595] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] nova.exception.PortBindingFailed: Binding failed for port 14fb3d6f-2aea-4010-9c16-2afe3df02850, please check neutron logs for more information. [ 738.890595] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] [ 738.890595] env[61852]: INFO nova.compute.manager [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Terminating instance [ 738.891482] env[61852]: DEBUG oslo_concurrency.lockutils [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Acquiring lock "refresh_cache-b0e0fcf9-1630-49aa-b053-5498245313b0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 738.969593] env[61852]: DEBUG nova.network.neutron [req-1c881fe2-a5da-4d92-a1db-1d6030d327c0 req-276082e2-0a89-4668-95c8-3ce70d0e5d62 service nova] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 739.031847] env[61852]: DEBUG nova.network.neutron [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Successfully created port: cf33027c-9160-4caf-a467-36d5407375d0 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 739.050250] env[61852]: DEBUG nova.network.neutron [req-1c881fe2-a5da-4d92-a1db-1d6030d327c0 req-276082e2-0a89-4668-95c8-3ce70d0e5d62 service nova] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.189434] env[61852]: DEBUG nova.compute.manager [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 739.414182] env[61852]: DEBUG oslo_concurrency.lockutils [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "d93b8055-1eb2-4368-a051-289dc5a9d0ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 739.414182] env[61852]: DEBUG oslo_concurrency.lockutils [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "d93b8055-1eb2-4368-a051-289dc5a9d0ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 739.552560] env[61852]: DEBUG oslo_concurrency.lockutils [req-1c881fe2-a5da-4d92-a1db-1d6030d327c0 req-276082e2-0a89-4668-95c8-3ce70d0e5d62 service nova] Releasing lock "refresh_cache-b0e0fcf9-1630-49aa-b053-5498245313b0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 739.553169] env[61852]: DEBUG oslo_concurrency.lockutils [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Acquired lock "refresh_cache-b0e0fcf9-1630-49aa-b053-5498245313b0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.553362] env[61852]: DEBUG nova.network.neutron [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 739.563893] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf03414-dfde-4357-b5c3-ae55cab99fbc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.573741] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f57ea71b-f353-4d9a-9c92-afe14f9736bd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.603239] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d03bfb26-d472-4ae5-9a15-3df0c1adb63e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.610636] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b52ce2f8-fd69-49f8-84f0-4e1e61a5edc4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.623833] env[61852]: DEBUG nova.compute.provider_tree [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 739.698841] env[61852]: INFO 
nova.virt.block_device [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Booting with volume 982afabd-4c5b-42b2-a1c5-617ae99ad149 at /dev/sda [ 739.744742] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b77ebbdb-fdca-44dd-9de7-7f1ef15a2c6d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.753350] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8e6c09f-e03d-4fe8-833a-f1aa1b239893 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.775416] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-378b7a3b-1a98-487a-a89c-29b976254b79 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.781943] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01771c9d-3b23-443e-8124-7f97eecf87b2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.802589] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c4a4c15-7f4f-4c99-9904-937a6690c0a4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.808562] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc24e67-3385-4f2c-95bc-f5dde48f3312 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.824081] env[61852]: DEBUG nova.virt.block_device [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Updating existing volume attachment record: 5a498b04-0ae0-4662-bec8-a7003c749fe3 {{(pid=61852) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 740.011290] env[61852]: ERROR nova.compute.manager [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port cf33027c-9160-4caf-a467-36d5407375d0, please check neutron logs for more information. 
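
Every copy of this traceback passes through oslo_utils.excutils.save_and_reraise_exception before the PortBindingFailed surfaces. A reduced sketch of that idiom as the _update_ports_for_instance frames use it; the failing update and the cleanup print are placeholders:

    from oslo_utils import excutils

    def _update_port(port_id):
        raise RuntimeError(f"Binding failed for port {port_id}")  # placeholder failure

    def update_ports_for_instance(port_ids):
        created = []
        try:
            for pid in port_ids:
                created.append(_update_port(pid))
        except Exception:
            # On __exit__ the saved exception is re-raised (force_reraise ->
            # raise self.value), which is why those frames recur in every
            # traceback above: cleanup runs, but the failure still propagates.
            with excutils.save_and_reraise_exception():
                print(f"cleaning up {len(created)} already-created ports")
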
[ 740.011290] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 740.011290] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 740.011290] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 740.011290] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 740.011290] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 740.011290] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 740.011290] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 740.011290] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 740.011290] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 740.011290] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 740.011290] env[61852]: ERROR nova.compute.manager raise self.value [ 740.011290] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 740.011290] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 740.011290] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 740.011290] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 740.011959] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 740.011959] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 740.011959] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port cf33027c-9160-4caf-a467-36d5407375d0, please check neutron logs for more information. 
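
The second emission of the same traceback, starting from the eventlet hub frames, records the exception being replayed in the waiter: _allocate_network_async runs in a greenthread, and the network_info model's _sync_wrapper re-raises the failure when spawn() iterates the VIFs. The propagation pattern can be reproduced outside Nova with eventlet alone:

    import eventlet

    def allocate():
        raise RuntimeError("Binding failed")  # stands in for PortBindingFailed

    gt = eventlet.spawn(allocate)
    try:
        gt.wait()  # re-raises the worker's exception in the waiting thread,
                   # matching the self._gt.wait() frame in network/model.py above
    except RuntimeError as exc:
        print(f"caught in waiter: {exc}")
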
[ 740.011959] env[61852]: ERROR nova.compute.manager [ 740.011959] env[61852]: Traceback (most recent call last): [ 740.011959] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 740.011959] env[61852]: listener.cb(fileno) [ 740.011959] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 740.011959] env[61852]: result = function(*args, **kwargs) [ 740.011959] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 740.011959] env[61852]: return func(*args, **kwargs) [ 740.011959] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 740.011959] env[61852]: raise e [ 740.011959] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 740.011959] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 740.011959] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 740.011959] env[61852]: created_port_ids = self._update_ports_for_instance( [ 740.011959] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 740.011959] env[61852]: with excutils.save_and_reraise_exception(): [ 740.011959] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 740.011959] env[61852]: self.force_reraise() [ 740.011959] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 740.011959] env[61852]: raise self.value [ 740.011959] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 740.011959] env[61852]: updated_port = self._update_port( [ 740.011959] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 740.011959] env[61852]: _ensure_no_port_binding_failure(port) [ 740.011959] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 740.011959] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 740.012915] env[61852]: nova.exception.PortBindingFailed: Binding failed for port cf33027c-9160-4caf-a467-36d5407375d0, please check neutron logs for more information. [ 740.012915] env[61852]: Removing descriptor: 19 [ 740.070054] env[61852]: DEBUG nova.network.neutron [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 740.126917] env[61852]: DEBUG nova.scheduler.client.report [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 740.140250] env[61852]: DEBUG nova.network.neutron [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.478260] env[61852]: DEBUG nova.compute.manager [req-897cbb20-611a-416d-b59b-231822d90eca req-8e9e238f-afa7-4703-afb4-bacc98f6b859 service nova] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Received event network-vif-deleted-14fb3d6f-2aea-4010-9c16-2afe3df02850 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 740.478467] env[61852]: DEBUG nova.compute.manager [req-897cbb20-611a-416d-b59b-231822d90eca req-8e9e238f-afa7-4703-afb4-bacc98f6b859 service nova] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Received event network-changed-cf33027c-9160-4caf-a467-36d5407375d0 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 740.478624] env[61852]: DEBUG nova.compute.manager [req-897cbb20-611a-416d-b59b-231822d90eca req-8e9e238f-afa7-4703-afb4-bacc98f6b859 service nova] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Refreshing instance network info cache due to event network-changed-cf33027c-9160-4caf-a467-36d5407375d0. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 740.478827] env[61852]: DEBUG oslo_concurrency.lockutils [req-897cbb20-611a-416d-b59b-231822d90eca req-8e9e238f-afa7-4703-afb4-bacc98f6b859 service nova] Acquiring lock "refresh_cache-e795b0f0-2c9c-4f44-9058-fbe706873d5a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 740.479169] env[61852]: DEBUG oslo_concurrency.lockutils [req-897cbb20-611a-416d-b59b-231822d90eca req-8e9e238f-afa7-4703-afb4-bacc98f6b859 service nova] Acquired lock "refresh_cache-e795b0f0-2c9c-4f44-9058-fbe706873d5a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.479342] env[61852]: DEBUG nova.network.neutron [req-897cbb20-611a-416d-b59b-231822d90eca req-8e9e238f-afa7-4703-afb4-bacc98f6b859 service nova] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Refreshing network info cache for port cf33027c-9160-4caf-a467-36d5407375d0 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 740.631829] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.459s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 740.632599] env[61852]: DEBUG nova.compute.manager [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 740.634924] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 18.124s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.642432] env[61852]: DEBUG oslo_concurrency.lockutils [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Releasing lock "refresh_cache-b0e0fcf9-1630-49aa-b053-5498245313b0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 740.642915] env[61852]: DEBUG nova.compute.manager [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 740.643133] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 740.643424] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b0a647ac-87ed-4606-b05e-2c91b75f1ae9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.652374] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8282a116-11e7-47a5-bb36-5d80169321b7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.673301] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b0e0fcf9-1630-49aa-b053-5498245313b0 could not be found. [ 740.673535] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 740.673709] env[61852]: INFO nova.compute.manager [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Took 0.03 seconds to destroy the instance on the hypervisor. [ 740.673949] env[61852]: DEBUG oslo.service.loopingcall [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 740.674171] env[61852]: DEBUG nova.compute.manager [-] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 740.674263] env[61852]: DEBUG nova.network.neutron [-] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 740.689489] env[61852]: DEBUG nova.network.neutron [-] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 740.997107] env[61852]: DEBUG nova.network.neutron [req-897cbb20-611a-416d-b59b-231822d90eca req-8e9e238f-afa7-4703-afb4-bacc98f6b859 service nova] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 741.071780] env[61852]: DEBUG nova.network.neutron [req-897cbb20-611a-416d-b59b-231822d90eca req-8e9e238f-afa7-4703-afb4-bacc98f6b859 service nova] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.138938] env[61852]: DEBUG nova.compute.utils [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 741.144176] env[61852]: DEBUG nova.compute.manager [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Not allocating networking since 'none' was specified. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 741.192948] env[61852]: DEBUG nova.network.neutron [-] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.574522] env[61852]: DEBUG oslo_concurrency.lockutils [req-897cbb20-611a-416d-b59b-231822d90eca req-8e9e238f-afa7-4703-afb4-bacc98f6b859 service nova] Releasing lock "refresh_cache-e795b0f0-2c9c-4f44-9058-fbe706873d5a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.574522] env[61852]: DEBUG nova.compute.manager [req-897cbb20-611a-416d-b59b-231822d90eca req-8e9e238f-afa7-4703-afb4-bacc98f6b859 service nova] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Received event network-vif-deleted-cf33027c-9160-4caf-a467-36d5407375d0 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 741.645148] env[61852]: DEBUG nova.compute.manager [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 741.670481] env[61852]: WARNING nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 694889e8-200e-454c-9e87-60521dd044d9 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 741.670858] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 5992f657-c29e-4da5-98f1-286a384ca0cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 741.670858] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance d7ca3eac-9738-483a-ae14-67e17929a251 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 741.670992] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance bd549d69-403b-4c5c-9e08-0c84d32a7c0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 741.670992] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance aae42775-cb43-4eee-967a-9ba0bdde7783 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 741.671106] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance b0e0fcf9-1630-49aa-b053-5498245313b0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 741.671218] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance e795b0f0-2c9c-4f44-9058-fbe706873d5a actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 741.671328] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance be44214d-72dc-4517-a91a-7f659b5aa897 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 741.695030] env[61852]: INFO nova.compute.manager [-] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Took 1.02 seconds to deallocate network for instance. [ 741.697401] env[61852]: DEBUG nova.compute.claims [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 741.697615] env[61852]: DEBUG oslo_concurrency.lockutils [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.928683] env[61852]: DEBUG nova.compute.manager [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 741.928997] env[61852]: DEBUG nova.virt.hardware [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 741.929283] env[61852]: DEBUG nova.virt.hardware [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 741.929385] env[61852]: DEBUG nova.virt.hardware [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 741.929697] env[61852]: DEBUG nova.virt.hardware [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 741.929697] env[61852]: DEBUG nova.virt.hardware [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 741.929859] env[61852]: DEBUG nova.virt.hardware [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 741.930105] env[61852]: DEBUG nova.virt.hardware [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 741.930271] env[61852]: DEBUG nova.virt.hardware [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 741.930506] env[61852]: DEBUG nova.virt.hardware [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Got 1 possible 
topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 741.930592] env[61852]: DEBUG nova.virt.hardware [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 741.930759] env[61852]: DEBUG nova.virt.hardware [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 741.931653] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a81f36d-ac8a-4960-b748-a6781bcc1486 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.940239] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6412897e-f3c5-4751-beb4-387b84abbcb0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.954986] env[61852]: ERROR nova.compute.manager [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port cf33027c-9160-4caf-a467-36d5407375d0, please check neutron logs for more information. 
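
The nova.virt.hardware records above show _get_desirable_cpu_topologies defaulting unset flavor/image limits (0:0:0) to 65536 maxima and, for a 1-vCPU flavor, finding exactly one topology. A simplified enumeration of that search, with hypothetical names; the real code additionally weighs preferences and NUMA constraints:

    from dataclasses import dataclass
    from itertools import product

    @dataclass(frozen=True)
    class Topology:
        sockets: int
        cores: int
        threads: int

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Enumerate sockets * cores * threads == vcpus within the limits,
        # mirroring "Build topologies for 1 vcpu(s) 1:1:1 ... Got 1
        # possible topologies" in the records above.
        for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                               range(1, min(vcpus, max_cores) + 1),
                               range(1, min(vcpus, max_threads) + 1)):
            if s * c * t == vcpus:
                yield Topology(s, c, t)

    print(list(possible_topologies(1)))  # [Topology(sockets=1, cores=1, threads=1)]
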
[ 741.954986] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Traceback (most recent call last): [ 741.954986] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 741.954986] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] yield resources [ 741.954986] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 741.954986] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] self.driver.spawn(context, instance, image_meta, [ 741.954986] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 741.954986] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 741.954986] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 741.954986] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] vm_ref = self.build_virtual_machine(instance, [ 741.954986] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 741.955439] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] vif_infos = vmwarevif.get_vif_info(self._session, [ 741.955439] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 741.955439] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] for vif in network_info: [ 741.955439] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 741.955439] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] return self._sync_wrapper(fn, *args, **kwargs) [ 741.955439] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 741.955439] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] self.wait() [ 741.955439] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 741.955439] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] self[:] = self._gt.wait() [ 741.955439] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 741.955439] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] return self._exit_event.wait() [ 741.955439] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 741.955439] env[61852]: ERROR 
nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] current.throw(*self._exc) [ 741.955864] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 741.955864] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] result = function(*args, **kwargs) [ 741.955864] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 741.955864] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] return func(*args, **kwargs) [ 741.955864] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 741.955864] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] raise e [ 741.955864] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 741.955864] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] nwinfo = self.network_api.allocate_for_instance( [ 741.955864] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 741.955864] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] created_port_ids = self._update_ports_for_instance( [ 741.955864] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 741.955864] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] with excutils.save_and_reraise_exception(): [ 741.955864] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 741.956301] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] self.force_reraise() [ 741.956301] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 741.956301] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] raise self.value [ 741.956301] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 741.956301] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] updated_port = self._update_port( [ 741.956301] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 741.956301] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] _ensure_no_port_binding_failure(port) [ 741.956301] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
741.956301] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] raise exception.PortBindingFailed(port_id=port['id']) [ 741.956301] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] nova.exception.PortBindingFailed: Binding failed for port cf33027c-9160-4caf-a467-36d5407375d0, please check neutron logs for more information. [ 741.956301] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] [ 741.956301] env[61852]: INFO nova.compute.manager [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Terminating instance [ 741.958832] env[61852]: DEBUG oslo_concurrency.lockutils [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Acquiring lock "refresh_cache-e795b0f0-2c9c-4f44-9058-fbe706873d5a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.958994] env[61852]: DEBUG oslo_concurrency.lockutils [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Acquired lock "refresh_cache-e795b0f0-2c9c-4f44-9058-fbe706873d5a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.959175] env[61852]: DEBUG nova.network.neutron [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 742.174319] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance d6a46605-aa45-4de3-80a8-cb73b9980669 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 742.475802] env[61852]: DEBUG nova.network.neutron [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 742.542219] env[61852]: DEBUG nova.network.neutron [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.657591] env[61852]: DEBUG nova.compute.manager [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 742.677716] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance b0433331-f005-49e0-bd22-bc78f970e3cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 742.685277] env[61852]: DEBUG nova.virt.hardware [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 742.685561] env[61852]: DEBUG nova.virt.hardware [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 742.685679] env[61852]: DEBUG nova.virt.hardware [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 742.685856] env[61852]: DEBUG nova.virt.hardware [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 742.686053] env[61852]: DEBUG nova.virt.hardware [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 742.686155] env[61852]: DEBUG nova.virt.hardware [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 742.686360] env[61852]: DEBUG nova.virt.hardware [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 742.686538] env[61852]: DEBUG nova.virt.hardware [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 742.686674] env[61852]: DEBUG nova.virt.hardware [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 742.686828] env[61852]: DEBUG nova.virt.hardware [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 742.686994] env[61852]: DEBUG nova.virt.hardware [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 742.687874] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aea7bcf-569b-416b-9983-584799a03946 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.697104] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72980b9c-1284-49bb-9ca1-e72cbdd1f3f1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.714726] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Instance VIF info [] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 742.720436] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Creating folder: Project (0573cfb91b7543089beee7ebcd4aac56). Parent ref: group-v277280. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 742.720758] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ae8929cf-3026-45a4-a796-2126f6a9ea2a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.731080] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Created folder: Project (0573cfb91b7543089beee7ebcd4aac56) in parent group-v277280. [ 742.731277] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Creating folder: Instances. Parent ref: group-v277297. 
{{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 742.731726] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cd60cd7f-b4b9-4224-8cfc-d39e78d643e5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.740450] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Created folder: Instances in parent group-v277297. [ 742.740683] env[61852]: DEBUG oslo.service.loopingcall [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 742.740866] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 742.742971] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cad62328-dab9-46cb-8501-fb0bcbb31a44 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.757265] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 742.757265] env[61852]: value = "task-1292731" [ 742.757265] env[61852]: _type = "Task" [ 742.757265] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.766775] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292731, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.045189] env[61852]: DEBUG oslo_concurrency.lockutils [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Releasing lock "refresh_cache-e795b0f0-2c9c-4f44-9058-fbe706873d5a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 743.046940] env[61852]: DEBUG nova.compute.manager [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 743.046940] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6041cf98-d59d-48ea-adc4-b9b6b5b44b64 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.056049] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93a1dc6a-e4ef-4ebb-a829-e903dcd3d4c9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.076851] env[61852]: WARNING nova.virt.vmwareapi.driver [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Instance does not exist. 
Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance e795b0f0-2c9c-4f44-9058-fbe706873d5a could not be found. [ 743.077207] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 743.077381] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6770de7d-a9d9-4324-8c19-deb97427b6b1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.085042] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8b8832f-156e-4e79-89fc-34d2c02ed860 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.106364] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e795b0f0-2c9c-4f44-9058-fbe706873d5a could not be found. [ 743.106586] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 743.106764] env[61852]: INFO nova.compute.manager [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Took 0.06 seconds to destroy the instance on the hypervisor. [ 743.107011] env[61852]: DEBUG oslo.service.loopingcall [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 743.107229] env[61852]: DEBUG nova.compute.manager [-] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 743.107323] env[61852]: DEBUG nova.network.neutron [-] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 743.122096] env[61852]: DEBUG nova.network.neutron [-] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 743.181122] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance d48cefda-0b05-4ec0-8c1d-bc25cd491faf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 743.267881] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292731, 'name': CreateVM_Task, 'duration_secs': 0.280314} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.268049] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 743.268468] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 743.268628] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.268973] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 743.269251] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15852f07-e741-470e-b8a5-9024738cbca0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.273971] env[61852]: DEBUG oslo_vmware.api [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Waiting for the task: (returnval){ [ 743.273971] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5220e391-bde3-1ef1-9f9c-d86c02eed3f9" [ 743.273971] env[61852]: _type = "Task" [ 743.273971] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.281086] env[61852]: DEBUG oslo_vmware.api [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5220e391-bde3-1ef1-9f9c-d86c02eed3f9, 'name': SearchDatastore_Task} progress is 0%. 
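
The CreateVM_Task and SearchDatastore_Task entries around here all follow oslo.vmware's invoke-then-poll shape: issue the SOAP call, then block while the task is polled to completion. A minimal sketch of that pattern; the endpoint, credentials, and managed-object reference are placeholders, and a reachable vCenter is assumed:

    from oslo_vmware import api

    # Placeholder endpoint and credentials; this is the same session class
    # whose wait_for_task/_poll_task lines appear throughout this log.
    session = api.VMwareAPISession("vcenter.example.com", "user", "secret",
                                   api_retry_count=10, task_poll_interval=0.5)

    vm_ref = None  # would be a VirtualMachine managed-object reference
    # invoke_api() produces the "Invoking <Object>.<Method>" lines;
    # wait_for_task() produces "progress is 0%" and "completed successfully".
    task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
    session.wait_for_task(task)
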
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.625044] env[61852]: DEBUG nova.network.neutron [-] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.683633] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance d3922357-383f-4f7e-9c76-4eb688a092b9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 743.785446] env[61852]: DEBUG oslo_vmware.api [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5220e391-bde3-1ef1-9f9c-d86c02eed3f9, 'name': SearchDatastore_Task, 'duration_secs': 0.009626} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.785744] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 743.785980] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 743.786240] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 743.786388] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.786561] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 743.786835] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-33529347-9be7-4807-9d67-14656e4236a9 {{(pid=61852) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.794852] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 743.795039] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 743.796209] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b0a5837-2d35-42c9-83d6-61f612115300 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.800783] env[61852]: DEBUG oslo_vmware.api [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Waiting for the task: (returnval){ [ 743.800783] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]523125d7-f61d-b9ba-b5b1-f6cab1edb724" [ 743.800783] env[61852]: _type = "Task" [ 743.800783] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.808826] env[61852]: DEBUG oslo_vmware.api [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]523125d7-f61d-b9ba-b5b1-f6cab1edb724, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.127965] env[61852]: INFO nova.compute.manager [-] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Took 1.02 seconds to deallocate network for instance. [ 744.186129] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance cb50d964-5c0e-4cf3-b652-0f7b7a488f91 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 744.311361] env[61852]: DEBUG oslo_vmware.api [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]523125d7-f61d-b9ba-b5b1-f6cab1edb724, 'name': SearchDatastore_Task, 'duration_secs': 0.008612} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.312086] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75fcd37e-3579-4091-a1ba-ded60b1e176d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.316851] env[61852]: DEBUG oslo_vmware.api [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Waiting for the task: (returnval){ [ 744.316851] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52e35066-848e-6484-fbdc-e2997b8503c2" [ 744.316851] env[61852]: _type = "Task" [ 744.316851] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.323929] env[61852]: DEBUG oslo_vmware.api [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52e35066-848e-6484-fbdc-e2997b8503c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.681026] env[61852]: INFO nova.compute.manager [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Took 0.55 seconds to detach 1 volumes for instance. [ 744.683590] env[61852]: DEBUG nova.compute.claims [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 744.684214] env[61852]: DEBUG oslo_concurrency.lockutils [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.688670] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 46ccab1f-b7af-49df-a38d-af1fa3bac486 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 744.827253] env[61852]: DEBUG oslo_vmware.api [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52e35066-848e-6484-fbdc-e2997b8503c2, 'name': SearchDatastore_Task, 'duration_secs': 0.009737} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.827500] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 744.827753] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] be44214d-72dc-4517-a91a-7f659b5aa897/be44214d-72dc-4517-a91a-7f659b5aa897.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 744.828010] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-44c8b014-d4c1-4bf2-afd4-ed5d5fa3e719 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.834937] env[61852]: DEBUG oslo_vmware.api [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Waiting for the task: (returnval){ [ 744.834937] env[61852]: value = "task-1292732" [ 744.834937] env[61852]: _type = "Task" [ 744.834937] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.842333] env[61852]: DEBUG oslo_vmware.api [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': task-1292732, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.191639] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance c94066d5-2e5f-4059-bdc5-385d517f1d84 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 745.344910] env[61852]: DEBUG oslo_vmware.api [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': task-1292732, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.49228} completed successfully. 
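
The copy that just completed moves the root disk from the shared image cache to a per-instance directory on the same datastore; the cache path is keyed by the Glance image UUID and the destination by the instance UUID. A small illustration of how those datastore paths are composed (plain string handling for illustration, not Nova's actual helper):

    IMAGE_ID = "90fd8f39-16b3-43e0-a682-0ec131005e31"     # Glance image UUID
    INSTANCE_ID = "be44214d-72dc-4517-a91a-7f659b5aa897"  # instance UUID

    cache_vmdk = f"[datastore1] devstack-image-cache_base/{IMAGE_ID}/{IMAGE_ID}.vmdk"
    instance_vmdk = f"[datastore1] {INSTANCE_ID}/{INSTANCE_ID}.vmdk"

    # Source and destination of the CopyVirtualDisk_Task above.
    print(cache_vmdk, "->", instance_vmdk)
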
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.345186] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] be44214d-72dc-4517-a91a-7f659b5aa897/be44214d-72dc-4517-a91a-7f659b5aa897.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 745.345400] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 745.346080] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8aa80d36-9647-4eab-a742-8bc6e5231ed0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.351235] env[61852]: DEBUG oslo_vmware.api [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Waiting for the task: (returnval){ [ 745.351235] env[61852]: value = "task-1292733" [ 745.351235] env[61852]: _type = "Task" [ 745.351235] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.359616] env[61852]: DEBUG oslo_vmware.api [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': task-1292733, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.695368] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance f18906e9-67b3-4537-9169-9d275e2ec4e4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 745.862289] env[61852]: DEBUG oslo_vmware.api [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': task-1292733, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062771} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.862549] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 745.863526] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e214e945-5d55-4127-b84d-36a3f50dd0f4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.882264] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] be44214d-72dc-4517-a91a-7f659b5aa897/be44214d-72dc-4517-a91a-7f659b5aa897.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 745.882481] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c9bd03e-14d8-410f-9fac-4d0d55124a5f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.901421] env[61852]: DEBUG oslo_vmware.api [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Waiting for the task: (returnval){ [ 745.901421] env[61852]: value = "task-1292734" [ 745.901421] env[61852]: _type = "Task" [ 745.901421] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.909071] env[61852]: DEBUG oslo_vmware.api [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': task-1292734, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.198872] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 8897a654-6805-45b0-b12b-16f7981d33ad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 746.411483] env[61852]: DEBUG oslo_vmware.api [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': task-1292734, 'name': ReconfigVM_Task, 'duration_secs': 0.277097} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.411764] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Reconfigured VM instance instance-00000031 to attach disk [datastore1] be44214d-72dc-4517-a91a-7f659b5aa897/be44214d-72dc-4517-a91a-7f659b5aa897.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 746.412423] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d4bcf52b-f413-48bc-99c4-3acb21cdd5eb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.418467] env[61852]: DEBUG oslo_vmware.api [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Waiting for the task: (returnval){ [ 746.418467] env[61852]: value = "task-1292735" [ 746.418467] env[61852]: _type = "Task" [ 746.418467] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.426508] env[61852]: DEBUG oslo_vmware.api [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': task-1292735, 'name': Rename_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.702154] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance b0d38886-aacb-4b7e-9530-c5891d9cee66 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 746.928822] env[61852]: DEBUG oslo_vmware.api [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': task-1292735, 'name': Rename_Task, 'duration_secs': 0.131716} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.928822] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 746.929139] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-83b1a89e-f855-42ff-86c0-d1809b216bbc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.935438] env[61852]: DEBUG oslo_vmware.api [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Waiting for the task: (returnval){ [ 746.935438] env[61852]: value = "task-1292736" [ 746.935438] env[61852]: _type = "Task" [ 746.935438] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.943554] env[61852]: DEBUG oslo_vmware.api [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': task-1292736, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.205554] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 8d733f93-7636-447b-a5d5-53c16c30061f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 747.445693] env[61852]: DEBUG oslo_vmware.api [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': task-1292736, 'name': PowerOnVM_Task, 'duration_secs': 0.406459} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.445954] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 747.446187] env[61852]: INFO nova.compute.manager [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Took 4.79 seconds to spawn the instance on the hypervisor. [ 747.446419] env[61852]: DEBUG nova.compute.manager [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 747.447149] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c86fd2d3-db66-45cf-bd77-bd64c5a79ccd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.708416] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance e265a4be-7b37-40b5-a199-42a7cd945f66 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 747.963228] env[61852]: INFO nova.compute.manager [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Took 25.76 seconds to build instance. 
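
The build that just finished went through the CPU-topology negotiation logged earlier: m1.nano exposes one vCPU and neither the flavor nor the image sets limits, so the only admissible topology is 1 socket, 1 core, 1 thread. A simplified, self-contained rendering of that filtering step; Nova's real logic lives in nova/virt/hardware.py and handles many more constraints:

    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Keep every (sockets, cores, threads) split whose product exactly
        # covers the vCPU count and respects the per-dimension maxima.
        for s, c, t in itertools.product(range(1, vcpus + 1), repeat=3):
            if (s * c * t == vcpus and s <= max_sockets
                    and c <= max_cores and t <= max_threads):
                yield (s, c, t)

    # One vCPU leaves a single candidate, matching "Got 1 possible
    # topologies ... VirtCPUTopology(cores=1,sockets=1,threads=1)" above.
    print(list(possible_topologies(1)))  # [(1, 1, 1)]
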
[ 748.211495] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 0ec1210f-7d42-4b71-abdc-9f818ffb91ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 748.465734] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ccbd636a-0ea7-4b7a-bbb8-a4bf5c947681 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Lock "be44214d-72dc-4517-a91a-7f659b5aa897" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.346s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.621288] env[61852]: INFO nova.compute.manager [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Rebuilding instance [ 748.663953] env[61852]: DEBUG nova.compute.manager [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 748.664874] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e3f6788-a70b-476c-9335-dc1553086517 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.714736] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 89970cff-cb49-4803-81a5-1675b0ea4aaf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 748.968073] env[61852]: DEBUG nova.compute.manager [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 749.177559] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 749.177957] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-82bab3b9-6cd1-470b-91b3-a3b1826a2048 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.186101] env[61852]: DEBUG oslo_vmware.api [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Waiting for the task: (returnval){ [ 749.186101] env[61852]: value = "task-1292737" [ 749.186101] env[61852]: _type = "Task" [ 749.186101] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.193590] env[61852]: DEBUG oslo_vmware.api [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': task-1292737, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.217597] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance f8ebb1b7-39c6-486e-ab25-23080d858846 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 749.489131] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 749.695210] env[61852]: DEBUG oslo_vmware.api [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': task-1292737, 'name': PowerOffVM_Task, 'duration_secs': 0.167285} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.695460] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 749.695684] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 749.696426] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55836160-2a4c-4a0a-a20d-4d079f3f1443 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.702740] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 749.702948] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-41ddc033-5b28-4534-9b41-cf9456d06049 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.722182] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 749.724331] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 749.724527] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 749.724698] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Deleting the datastore file [datastore1] be44214d-72dc-4517-a91a-7f659b5aa897 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 749.725118] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d8a9250c-ee2f-4ba0-b584-4d4e1f44d1ab {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.730803] env[61852]: DEBUG oslo_vmware.api [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Waiting for the task: (returnval){ [ 749.730803] env[61852]: value = "task-1292739" [ 749.730803] env[61852]: _type = "Task" [ 749.730803] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.738977] env[61852]: DEBUG oslo_vmware.api [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': task-1292739, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.226293] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance b0f8f7dd-e559-43be-b541-c3da48a07d68 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 750.240172] env[61852]: DEBUG oslo_vmware.api [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': task-1292739, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.088318} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.241053] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 750.241252] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 750.241428] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 750.729187] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 751.233312] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance d93b8055-1eb2-4368-a051-289dc5a9d0ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
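
Every instance tracked here carries the same m1.nano footprint of {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}, and the "Final resource view" reported just below follows from it. A quick check of that arithmetic, assuming Nova's default reserved_host_memory_mb of 512 (an assumption; the option is configurable):

    # Seven instances are actually placed on this host (used_vcpus=7 below),
    # each with the m1.nano allocation shown in the log.
    placed_instances = 7
    per_instance_ram_mb = 192
    reserved_host_memory_mb = 512  # Nova default, configurable

    used_ram_mb = reserved_host_memory_mb + placed_instances * per_instance_ram_mb
    used_vcpus = placed_instances * 1
    print(used_ram_mb, used_vcpus)  # 1856 MB and 7 vCPUs, as reported below

The 6 GB used_disk figure is lower than 7 x 1 GB, consistent with one of the placed instances being volume-backed and holding no local root disk.
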
{{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 751.233654] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=61852) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 751.233701] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=61852) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 751.277997] env[61852]: DEBUG nova.virt.hardware [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 751.278861] env[61852]: DEBUG nova.virt.hardware [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 751.278861] env[61852]: DEBUG nova.virt.hardware [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 751.278861] env[61852]: DEBUG nova.virt.hardware [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 751.278861] env[61852]: DEBUG nova.virt.hardware [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 751.279074] env[61852]: DEBUG nova.virt.hardware [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 751.280791] env[61852]: DEBUG nova.virt.hardware [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 
tempest-ServerShowV254Test-1165329400-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 751.280791] env[61852]: DEBUG nova.virt.hardware [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 751.280791] env[61852]: DEBUG nova.virt.hardware [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 751.280791] env[61852]: DEBUG nova.virt.hardware [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 751.280791] env[61852]: DEBUG nova.virt.hardware [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 751.281058] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c171807-3c21-453e-b1a4-da054301b160 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.292159] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad20437-018b-4f38-985c-5dc5bde234f6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.309484] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Instance VIF info [] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 751.315264] env[61852]: DEBUG oslo.service.loopingcall [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 751.315522] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 751.315740] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-60a98a62-ec45-491d-8fc1-757c1ab80cdb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.335089] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 751.335089] env[61852]: value = "task-1292740" [ 751.335089] env[61852]: _type = "Task" [ 751.335089] env[61852]: } to complete. 
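
The "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" entry above comes from oslo.service's loopingcall module. The building block underneath is a looping call that re-runs a function until it signals completion; a minimal sketch of that primitive with an illustrative callback, not the decorated create_vm call Nova uses:

    from oslo_service import loopingcall

    def _check_done():
        # Nova's version would poll the CreateVM_Task state; here we finish
        # immediately and hand a value back through LoopingCallDone.
        raise loopingcall.LoopingCallDone(retvalue="vm-ref")

    timer = loopingcall.FixedIntervalLoopingCall(_check_done)
    result = timer.start(interval=0.5).wait()  # blocks until LoopingCallDone
    print(result)  # "vm-ref"
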
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.345063] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292740, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.588481] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d118135-8497-4aea-af77-a0c967838644 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.595612] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88b7516b-7b07-4b05-8ac7-121d501c49c0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.624474] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a08d40ae-c9ae-4fc5-b1cc-feadc53b5e2b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.631986] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f2aa257-9f84-4399-ace5-33dbdd82753a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.647300] env[61852]: DEBUG nova.compute.provider_tree [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 751.845309] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292740, 'name': CreateVM_Task, 'duration_secs': 0.261027} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.845483] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 751.846069] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.846141] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.846445] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 751.846689] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-017ab3c0-66ef-49ae-90fc-bf21fd7cbb92 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.850998] env[61852]: DEBUG oslo_vmware.api [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Waiting for the task: (returnval){ [ 751.850998] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52ba5b1d-7465-3dc9-6f89-9ca518d87341" [ 751.850998] env[61852]: _type = "Task" [ 751.850998] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.858538] env[61852]: DEBUG oslo_vmware.api [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52ba5b1d-7465-3dc9-6f89-9ca518d87341, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.150721] env[61852]: DEBUG nova.scheduler.client.report [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 752.361413] env[61852]: DEBUG oslo_vmware.api [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52ba5b1d-7465-3dc9-6f89-9ca518d87341, 'name': SearchDatastore_Task, 'duration_secs': 0.009051} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.361727] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 752.361950] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 752.363175] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 752.363175] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.363175] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 752.363175] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9fde5ce4-4529-43d0-8951-74caede0d283 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.370967] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 752.371160] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 752.371915] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd210683-5374-4c1b-bd90-0a7cd9eab297 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.378103] env[61852]: DEBUG oslo_vmware.api [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Waiting for the task: (returnval){ [ 752.378103] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]520df071-5395-ac26-2c8b-1a030fc04706" [ 752.378103] env[61852]: _type = "Task" [ 752.378103] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.385324] env[61852]: DEBUG oslo_vmware.api [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]520df071-5395-ac26-2c8b-1a030fc04706, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.656022] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61852) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 752.656312] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 12.021s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 752.656536] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 27.334s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.888433] env[61852]: DEBUG oslo_vmware.api [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]520df071-5395-ac26-2c8b-1a030fc04706, 'name': SearchDatastore_Task, 'duration_secs': 0.007744} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.889207] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c480864-08b5-4412-9ab9-dcaf36dc7abd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.894038] env[61852]: DEBUG oslo_vmware.api [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Waiting for the task: (returnval){ [ 752.894038] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52b74ad4-80d6-bcf7-df2c-91abfee1c98d" [ 752.894038] env[61852]: _type = "Task" [ 752.894038] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.901039] env[61852]: DEBUG oslo_vmware.api [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52b74ad4-80d6-bcf7-df2c-91abfee1c98d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.405229] env[61852]: DEBUG oslo_vmware.api [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52b74ad4-80d6-bcf7-df2c-91abfee1c98d, 'name': SearchDatastore_Task, 'duration_secs': 0.008973} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.407687] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 753.407955] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] be44214d-72dc-4517-a91a-7f659b5aa897/be44214d-72dc-4517-a91a-7f659b5aa897.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 753.409286] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-69170570-4e19-4ae3-918e-2f821de4a852 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.416763] env[61852]: DEBUG oslo_vmware.api [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Waiting for the task: (returnval){ [ 753.416763] env[61852]: value = "task-1292741" [ 753.416763] env[61852]: _type = "Task" [ 753.416763] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.427300] env[61852]: DEBUG oslo_vmware.api [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': task-1292741, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.475939] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f0a8470-66aa-421d-8f31-97c6aa33c529 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.483093] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15d34af5-0d75-4db5-a7cb-eb0797fa805a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.511716] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3a8166c-3d6e-4ad8-ac84-2746a78a863c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.518562] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2394c54f-2b36-4c60-9f4c-ca3ae026f99e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.531936] env[61852]: DEBUG nova.compute.provider_tree [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 753.926693] env[61852]: DEBUG oslo_vmware.api [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': task-1292741, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.034841] env[61852]: DEBUG nova.scheduler.client.report [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 754.428427] env[61852]: DEBUG oslo_vmware.api [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': task-1292741, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.516697} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.428427] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] be44214d-72dc-4517-a91a-7f659b5aa897/be44214d-72dc-4517-a91a-7f659b5aa897.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 754.428771] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 754.428809] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-59db76bf-baac-48e2-be30-1fcedab0e878 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.435608] env[61852]: DEBUG oslo_vmware.api [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Waiting for the task: (returnval){ [ 754.435608] env[61852]: value = "task-1292742" [ 754.435608] env[61852]: _type = "Task" [ 754.435608] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.444708] env[61852]: DEBUG oslo_vmware.api [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': task-1292742, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.540272] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.884s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 754.540935] env[61852]: ERROR nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 0d927d4f-0ee3-47fd-8f50-9c9eac097544, please check neutron logs for more information. 
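[editorial sketch] The PortBindingFailed above, and the traceback that follows, bottom out in _ensure_no_port_binding_failure, the last frame of the stack: Neutron marks a port its mechanism drivers could not bind, and Nova converts that into a hard build failure so the instance gets re-scheduled. Below is a minimal, self-contained sketch of that guard. It assumes only the standard Neutron 'binding:vif_type' port attribute; the exception class and function here are simplified stand-ins for illustration, not Nova's exact code.

# Simplified stand-ins for nova.exception.PortBindingFailed and the
# guard in nova/network/neutron.py seen in the traceback below.
VIF_TYPE_BINDING_FAILED = 'binding_failed'

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs "
            "for more information." % port_id)

def ensure_no_port_binding_failure(port):
    # Neutron reports a port it could not bind by setting
    # binding:vif_type to 'binding_failed'; treat that as fatal.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port['id'])

# The failing port from this build:
port = {'id': '0d927d4f-0ee3-47fd-8f50-9c9eac097544',
        'binding:vif_type': 'binding_failed'}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)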
[ 754.540935] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Traceback (most recent call last): [ 754.540935] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 754.540935] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] self.driver.spawn(context, instance, image_meta, [ 754.540935] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 754.540935] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 754.540935] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 754.540935] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] vm_ref = self.build_virtual_machine(instance, [ 754.540935] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 754.540935] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] vif_infos = vmwarevif.get_vif_info(self._session, [ 754.540935] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 754.541321] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] for vif in network_info: [ 754.541321] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 754.541321] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] return self._sync_wrapper(fn, *args, **kwargs) [ 754.541321] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 754.541321] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] self.wait() [ 754.541321] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 754.541321] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] self[:] = self._gt.wait() [ 754.541321] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 754.541321] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] return self._exit_event.wait() [ 754.541321] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 754.541321] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] current.throw(*self._exc) [ 754.541321] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
754.541321] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] result = function(*args, **kwargs) [ 754.541751] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 754.541751] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] return func(*args, **kwargs) [ 754.541751] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 754.541751] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] raise e [ 754.541751] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 754.541751] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] nwinfo = self.network_api.allocate_for_instance( [ 754.541751] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 754.541751] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] created_port_ids = self._update_ports_for_instance( [ 754.541751] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 754.541751] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] with excutils.save_and_reraise_exception(): [ 754.541751] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 754.541751] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] self.force_reraise() [ 754.541751] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 754.542189] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] raise self.value [ 754.542189] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 754.542189] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] updated_port = self._update_port( [ 754.542189] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 754.542189] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] _ensure_no_port_binding_failure(port) [ 754.542189] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 754.542189] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] raise exception.PortBindingFailed(port_id=port['id']) [ 754.542189] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] nova.exception.PortBindingFailed: Binding failed for 
port 0d927d4f-0ee3-47fd-8f50-9c9eac097544, please check neutron logs for more information. [ 754.542189] env[61852]: ERROR nova.compute.manager [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] [ 754.542189] env[61852]: DEBUG nova.compute.utils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Binding failed for port 0d927d4f-0ee3-47fd-8f50-9c9eac097544, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 754.543222] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 27.451s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 754.547725] env[61852]: DEBUG nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Build of instance 5992f657-c29e-4da5-98f1-286a384ca0cd was re-scheduled: Binding failed for port 0d927d4f-0ee3-47fd-8f50-9c9eac097544, please check neutron logs for more information. {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 754.547725] env[61852]: DEBUG nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 754.547725] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquiring lock "refresh_cache-5992f657-c29e-4da5-98f1-286a384ca0cd" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 754.547725] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquired lock "refresh_cache-5992f657-c29e-4da5-98f1-286a384ca0cd" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.548070] env[61852]: DEBUG nova.network.neutron [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 754.945084] env[61852]: DEBUG oslo_vmware.api [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': task-1292742, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060117} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.945369] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 754.946169] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27b52e09-00cb-44d5-a1cc-b51cd0fa3da9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.965020] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] be44214d-72dc-4517-a91a-7f659b5aa897/be44214d-72dc-4517-a91a-7f659b5aa897.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 754.965273] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3071ad39-b312-4162-b18b-abfe94c3df08 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.983899] env[61852]: DEBUG oslo_vmware.api [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Waiting for the task: (returnval){ [ 754.983899] env[61852]: value = "task-1292743" [ 754.983899] env[61852]: _type = "Task" [ 754.983899] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.991025] env[61852]: DEBUG oslo_vmware.api [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': task-1292743, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.070270] env[61852]: DEBUG nova.network.neutron [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 755.169031] env[61852]: DEBUG nova.network.neutron [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.374283] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-865d20f8-ad77-4113-8e43-e29d23874f7f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.381384] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c6527a8-ad24-47e8-acc5-b90670648c92 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.411104] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-465d87c1-24b0-4076-96b3-9f723c8a95a4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.417819] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f87d55ef-2574-4605-b7ea-08995ec1fc2a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.430159] env[61852]: DEBUG nova.compute.provider_tree [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 755.493481] env[61852]: DEBUG oslo_vmware.api [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': task-1292743, 'name': ReconfigVM_Task, 'duration_secs': 0.326831} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.493794] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Reconfigured VM instance instance-00000031 to attach disk [datastore1] be44214d-72dc-4517-a91a-7f659b5aa897/be44214d-72dc-4517-a91a-7f659b5aa897.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 755.494438] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a272d5e6-0c35-49c1-89a3-8653ce188fd5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.501157] env[61852]: DEBUG oslo_vmware.api [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Waiting for the task: (returnval){ [ 755.501157] env[61852]: value = "task-1292744" [ 755.501157] env[61852]: _type = "Task" [ 755.501157] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.508669] env[61852]: DEBUG oslo_vmware.api [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': task-1292744, 'name': Rename_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.671320] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Releasing lock "refresh_cache-5992f657-c29e-4da5-98f1-286a384ca0cd" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 755.671556] env[61852]: DEBUG nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 755.671742] env[61852]: DEBUG nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 755.671908] env[61852]: DEBUG nova.network.neutron [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 755.686915] env[61852]: DEBUG nova.network.neutron [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 755.933246] env[61852]: DEBUG nova.scheduler.client.report [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 756.010963] env[61852]: DEBUG oslo_vmware.api [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': task-1292744, 'name': Rename_Task, 'duration_secs': 0.131139} completed successfully.
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.011740] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 756.012289] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ed44be40-0a64-4a4d-8385-2f8ad20306aa {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.017885] env[61852]: DEBUG oslo_vmware.api [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Waiting for the task: (returnval){ [ 756.017885] env[61852]: value = "task-1292745" [ 756.017885] env[61852]: _type = "Task" [ 756.017885] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.026515] env[61852]: DEBUG oslo_vmware.api [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': task-1292745, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.189855] env[61852]: DEBUG nova.network.neutron [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.438047] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.895s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 756.438811] env[61852]: ERROR nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8a46cf7a-24ec-48dc-86fa-470a8270cb0d, please check neutron logs for more information. 
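[editorial sketch] Every Task record above (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, Rename_Task, PowerOnVM_Task) follows the same wait_for_task/_poll_task shape: re-read the task's info, log the reported progress while it is queued or running, and log the duration once it succeeds. The following is a rough sketch of such a poll loop; the TaskInfo fields and the fixed poll interval are simplifying assumptions, not oslo.vmware's actual implementation (which drives the poll with an oslo.service looping call).

import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str          # 'queued' | 'running' | 'success' | 'error'
    progress: int = 0   # percent, as in "progress is 89%" above
    error: str = ''

def wait_for_task(read_task_info, poll_interval=0.5):
    # Poll the task until it leaves the queued/running states.
    start = time.monotonic()
    while True:
        info = read_task_info()   # one property-collector round trip
        if info.state in ('queued', 'running'):
            print("progress is %d%%." % info.progress)
            time.sleep(poll_interval)
            continue
        if info.state == 'success':
            print("'duration_secs': %.6f, completed successfully."
                  % (time.monotonic() - start))
            return info
        raise RuntimeError(info.error or 'task failed')

# Simulated task that reports 0%, then 89%, then success:
states = iter([TaskInfo('running', 0), TaskInfo('running', 89),
               TaskInfo('success', 100)])
wait_for_task(lambda: next(states), poll_interval=0.0)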
[ 756.438811] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Traceback (most recent call last): [ 756.438811] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 756.438811] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] self.driver.spawn(context, instance, image_meta, [ 756.438811] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 756.438811] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] self._vmops.spawn(context, instance, image_meta, injected_files, [ 756.438811] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 756.438811] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] vm_ref = self.build_virtual_machine(instance, [ 756.438811] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 756.438811] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] vif_infos = vmwarevif.get_vif_info(self._session, [ 756.438811] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 756.439249] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] for vif in network_info: [ 756.439249] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 756.439249] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] return self._sync_wrapper(fn, *args, **kwargs) [ 756.439249] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 756.439249] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] self.wait() [ 756.439249] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 756.439249] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] self[:] = self._gt.wait() [ 756.439249] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 756.439249] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] return self._exit_event.wait() [ 756.439249] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 756.439249] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] result = hub.switch() [ 756.439249] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
756.439249] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] return self.greenlet.switch() [ 756.439622] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 756.439622] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] result = function(*args, **kwargs) [ 756.439622] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 756.439622] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] return func(*args, **kwargs) [ 756.439622] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 756.439622] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] raise e [ 756.439622] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 756.439622] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] nwinfo = self.network_api.allocate_for_instance( [ 756.439622] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 756.439622] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] created_port_ids = self._update_ports_for_instance( [ 756.439622] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 756.439622] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] with excutils.save_and_reraise_exception(): [ 756.439622] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 756.439992] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] self.force_reraise() [ 756.439992] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 756.439992] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] raise self.value [ 756.439992] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 756.439992] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] updated_port = self._update_port( [ 756.439992] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 756.439992] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] _ensure_no_port_binding_failure(port) [ 756.439992] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 756.439992] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] raise exception.PortBindingFailed(port_id=port['id']) [ 756.439992] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] nova.exception.PortBindingFailed: Binding failed for port 8a46cf7a-24ec-48dc-86fa-470a8270cb0d, please check neutron logs for more information. [ 756.439992] env[61852]: ERROR nova.compute.manager [instance: d7ca3eac-9738-483a-ae14-67e17929a251] [ 756.440441] env[61852]: DEBUG nova.compute.utils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Binding failed for port 8a46cf7a-24ec-48dc-86fa-470a8270cb0d, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 756.440826] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 28.145s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 756.440998] env[61852]: DEBUG nova.objects.instance [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61852) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 756.443594] env[61852]: DEBUG nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Build of instance d7ca3eac-9738-483a-ae14-67e17929a251 was re-scheduled: Binding failed for port 8a46cf7a-24ec-48dc-86fa-470a8270cb0d, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 756.444027] env[61852]: DEBUG nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 756.444255] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquiring lock "refresh_cache-d7ca3eac-9738-483a-ae14-67e17929a251" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 756.444401] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquired lock "refresh_cache-d7ca3eac-9738-483a-ae14-67e17929a251" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.444557] env[61852]: DEBUG nova.network.neutron [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 756.528467] env[61852]: DEBUG oslo_vmware.api [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': task-1292745, 'name': PowerOnVM_Task, 'duration_secs': 0.412791} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.528743] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 756.528944] env[61852]: DEBUG nova.compute.manager [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 756.530393] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d0d2a5e-11ac-4dbe-a565-df03b53b8db5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.692648] env[61852]: INFO nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 5992f657-c29e-4da5-98f1-286a384ca0cd] Took 1.02 seconds to deallocate network for instance. [ 756.969882] env[61852]: DEBUG nova.network.neutron [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 757.038101] env[61852]: DEBUG nova.network.neutron [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.048172] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.252308] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Acquiring lock "be44214d-72dc-4517-a91a-7f659b5aa897" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.252618] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Lock "be44214d-72dc-4517-a91a-7f659b5aa897" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.252879] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Acquiring lock "be44214d-72dc-4517-a91a-7f659b5aa897-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.253123] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Lock "be44214d-72dc-4517-a91a-7f659b5aa897-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.253414] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Lock "be44214d-72dc-4517-a91a-7f659b5aa897-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 757.255344] env[61852]: INFO nova.compute.manager [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Terminating instance [ 757.257046] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6ada4613-4932-497a-956c-d02e0fdda573
tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Acquiring lock "refresh_cache-be44214d-72dc-4517-a91a-7f659b5aa897" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.257243] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Acquired lock "refresh_cache-be44214d-72dc-4517-a91a-7f659b5aa897" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.257452] env[61852]: DEBUG nova.network.neutron [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 757.453451] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8194bf69-400a-4855-87ba-1751dd9217ae tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 757.454867] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.321s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.457088] env[61852]: INFO nova.compute.claims [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 757.541715] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Releasing lock "refresh_cache-d7ca3eac-9738-483a-ae14-67e17929a251" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 757.541910] env[61852]: DEBUG nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 757.542107] env[61852]: DEBUG nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 757.542303] env[61852]: DEBUG nova.network.neutron [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 757.557644] env[61852]: DEBUG nova.network.neutron [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 757.725065] env[61852]: INFO nova.scheduler.client.report [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Deleted allocations for instance 5992f657-c29e-4da5-98f1-286a384ca0cd [ 757.775724] env[61852]: DEBUG nova.network.neutron [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 757.819457] env[61852]: DEBUG nova.network.neutron [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.060353] env[61852]: DEBUG nova.network.neutron [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.232986] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lock "5992f657-c29e-4da5-98f1-286a384ca0cd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 137.466s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 758.322824] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Releasing lock "refresh_cache-be44214d-72dc-4517-a91a-7f659b5aa897" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 758.323335] env[61852]: DEBUG nova.compute.manager [None req-6ada4613-4932-497a-956c-d02e0fdda573
tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 758.323543] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 758.324424] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efa1103f-9c53-4d8c-b85d-4bc10528c919 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.331806] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 758.332053] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-900ac0fb-b9e0-44d3-a428-f4e4f2d4b2fa {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.337430] env[61852]: DEBUG oslo_vmware.api [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Waiting for the task: (returnval){ [ 758.337430] env[61852]: value = "task-1292746" [ 758.337430] env[61852]: _type = "Task" [ 758.337430] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.345785] env[61852]: DEBUG oslo_vmware.api [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': task-1292746, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.562449] env[61852]: INFO nova.compute.manager [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: d7ca3eac-9738-483a-ae14-67e17929a251] Took 1.02 seconds to deallocate network for instance. [ 758.735160] env[61852]: DEBUG nova.compute.manager [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 758.766643] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-228a6c52-cde3-445a-9cda-b0479583983a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.775014] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edb42d38-94e7-43dc-aba1-cf6721992f4c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.804086] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f43b46f-6db0-4ef9-9e51-f18560971c54 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.811150] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b64e75-ae94-404c-bbf6-2b0dd09f379e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.824037] env[61852]: DEBUG nova.compute.provider_tree [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 758.846212] env[61852]: DEBUG oslo_vmware.api [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': task-1292746, 'name': PowerOffVM_Task, 'duration_secs': 0.197269} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.846443] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 758.846606] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 758.846823] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-20951eb0-362b-4bbb-a5e6-9df3cac1cc7e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.871053] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 758.871324] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 758.871528] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Deleting the datastore file [datastore1] be44214d-72dc-4517-a91a-7f659b5aa897 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 758.871776] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ee9b0936-1d90-4807-bdc6-dd78a8fca0d7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.877434] env[61852]: DEBUG oslo_vmware.api [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Waiting for the task: (returnval){ [ 758.877434] env[61852]: value = "task-1292748" [ 758.877434] env[61852]: _type = "Task" [ 758.877434] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.886160] env[61852]: DEBUG oslo_vmware.api [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': task-1292748, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.253556] env[61852]: DEBUG oslo_concurrency.lockutils [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 759.326773] env[61852]: DEBUG nova.scheduler.client.report [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 759.386772] env[61852]: DEBUG oslo_vmware.api [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Task: {'id': task-1292748, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.102957} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.387024] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 759.387209] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 759.387387] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 759.387553] env[61852]: INFO nova.compute.manager [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Took 1.06 seconds to destroy the instance on the hypervisor. [ 759.387791] env[61852]: DEBUG oslo.service.loopingcall [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 759.388043] env[61852]: DEBUG nova.compute.manager [-] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 759.388146] env[61852]: DEBUG nova.network.neutron [-] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 759.402566] env[61852]: DEBUG nova.network.neutron [-] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 759.586664] env[61852]: INFO nova.scheduler.client.report [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Deleted allocations for instance d7ca3eac-9738-483a-ae14-67e17929a251 [ 759.831981] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.377s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 759.832618] env[61852]: DEBUG nova.compute.manager [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 759.835782] env[61852]: DEBUG oslo_concurrency.lockutils [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 28.386s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 759.904851] env[61852]: DEBUG nova.network.neutron [-] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.094236] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cd6eafa-9c50-4789-805c-2036e7203832 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lock "d7ca3eac-9738-483a-ae14-67e17929a251" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 139.302s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.340976] env[61852]: DEBUG nova.compute.utils [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 760.345079] env[61852]: DEBUG nova.compute.manager [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 760.345221] env[61852]: DEBUG nova.network.neutron [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 760.391983] env[61852]: DEBUG nova.policy [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '088de3ba55b844388a072a6397543765', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9348e1bb3c6d4ababdba3cc38b2c4d64', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 760.409947] env[61852]: INFO nova.compute.manager [-] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Took 1.02 seconds to deallocate network for instance. [ 760.597153] env[61852]: DEBUG nova.compute.manager [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Starting instance...
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 760.681290] env[61852]: DEBUG nova.network.neutron [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Successfully created port: 12409f55-90b1-4ec0-9fae-fd4c1612154c {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 760.685378] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aee63e79-4dff-4637-bf74-e4ec08943a5c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.695167] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-559b50b3-fc1f-4305-b5b5-b9707c9320eb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.727160] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a2acb56-24ac-4df8-a615-c29f667f5bc9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.734727] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba6439b1-b996-421c-8494-bc73761396b4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.747836] env[61852]: DEBUG nova.compute.provider_tree [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 760.845998] env[61852]: DEBUG nova.compute.manager [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 760.916685] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.127891] env[61852]: DEBUG oslo_concurrency.lockutils [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.250673] env[61852]: DEBUG nova.scheduler.client.report [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 761.369756] env[61852]: DEBUG nova.compute.manager [req-6b3f4378-7e71-43b9-9626-565d2a6bcf61 req-6d4ff9b3-2218-4a79-bfff-904c7ea7baf7 service nova] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Received event network-changed-12409f55-90b1-4ec0-9fae-fd4c1612154c {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 761.369946] env[61852]: DEBUG nova.compute.manager [req-6b3f4378-7e71-43b9-9626-565d2a6bcf61 req-6d4ff9b3-2218-4a79-bfff-904c7ea7baf7 service nova] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Refreshing instance network info cache due to event network-changed-12409f55-90b1-4ec0-9fae-fd4c1612154c. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 761.370215] env[61852]: DEBUG oslo_concurrency.lockutils [req-6b3f4378-7e71-43b9-9626-565d2a6bcf61 req-6d4ff9b3-2218-4a79-bfff-904c7ea7baf7 service nova] Acquiring lock "refresh_cache-d6a46605-aa45-4de3-80a8-cb73b9980669" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.370413] env[61852]: DEBUG oslo_concurrency.lockutils [req-6b3f4378-7e71-43b9-9626-565d2a6bcf61 req-6d4ff9b3-2218-4a79-bfff-904c7ea7baf7 service nova] Acquired lock "refresh_cache-d6a46605-aa45-4de3-80a8-cb73b9980669" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.370602] env[61852]: DEBUG nova.network.neutron [req-6b3f4378-7e71-43b9-9626-565d2a6bcf61 req-6d4ff9b3-2218-4a79-bfff-904c7ea7baf7 service nova] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Refreshing network info cache for port 12409f55-90b1-4ec0-9fae-fd4c1612154c {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 761.564175] env[61852]: ERROR nova.compute.manager [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 12409f55-90b1-4ec0-9fae-fd4c1612154c, please check neutron logs for more information. [ 761.564175] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 761.564175] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 761.564175] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 761.564175] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 761.564175] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 761.564175] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 761.564175] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 761.564175] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 761.564175] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 761.564175] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 761.564175] env[61852]: ERROR nova.compute.manager raise self.value [ 761.564175] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 761.564175] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 761.564175] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 761.564175] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 761.564645] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 761.564645] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 761.564645] env[61852]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port 12409f55-90b1-4ec0-9fae-fd4c1612154c, please check neutron logs for more information. [ 761.564645] env[61852]: ERROR nova.compute.manager [ 761.564645] env[61852]: Traceback (most recent call last): [ 761.564645] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 761.564645] env[61852]: listener.cb(fileno) [ 761.564645] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 761.564645] env[61852]: result = function(*args, **kwargs) [ 761.564645] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 761.564645] env[61852]: return func(*args, **kwargs) [ 761.564645] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 761.564645] env[61852]: raise e [ 761.564645] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 761.564645] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 761.564645] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 761.564645] env[61852]: created_port_ids = self._update_ports_for_instance( [ 761.564645] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 761.564645] env[61852]: with excutils.save_and_reraise_exception(): [ 761.564645] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 761.564645] env[61852]: self.force_reraise() [ 761.564645] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 761.564645] env[61852]: raise self.value [ 761.564645] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 761.564645] env[61852]: updated_port = self._update_port( [ 761.564645] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 761.564645] env[61852]: _ensure_no_port_binding_failure(port) [ 761.564645] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 761.564645] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 761.565506] env[61852]: nova.exception.PortBindingFailed: Binding failed for port 12409f55-90b1-4ec0-9fae-fd4c1612154c, please check neutron logs for more information. [ 761.565506] env[61852]: Removing descriptor: 19 [ 761.755767] env[61852]: DEBUG oslo_concurrency.lockutils [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.920s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.756581] env[61852]: ERROR nova.compute.manager [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8b029d4d-780a-46e9-8f80-1cccecf4f293, please check neutron logs for more information. 
[ 761.756581] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Traceback (most recent call last): [ 761.756581] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 761.756581] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] self.driver.spawn(context, instance, image_meta, [ 761.756581] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 761.756581] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 761.756581] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 761.756581] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] vm_ref = self.build_virtual_machine(instance, [ 761.756581] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 761.756581] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] vif_infos = vmwarevif.get_vif_info(self._session, [ 761.756581] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 761.757038] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] for vif in network_info: [ 761.757038] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 761.757038] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] return self._sync_wrapper(fn, *args, **kwargs) [ 761.757038] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 761.757038] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] self.wait() [ 761.757038] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 761.757038] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] self[:] = self._gt.wait() [ 761.757038] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 761.757038] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] return self._exit_event.wait() [ 761.757038] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 761.757038] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] current.throw(*self._exc) [ 761.757038] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
761.757038] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] result = function(*args, **kwargs) [ 761.757433] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 761.757433] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] return func(*args, **kwargs) [ 761.757433] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 761.757433] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] raise e [ 761.757433] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 761.757433] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] nwinfo = self.network_api.allocate_for_instance( [ 761.757433] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 761.757433] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] created_port_ids = self._update_ports_for_instance( [ 761.757433] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 761.757433] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] with excutils.save_and_reraise_exception(): [ 761.757433] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 761.757433] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] self.force_reraise() [ 761.757433] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 761.757793] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] raise self.value [ 761.757793] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 761.757793] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] updated_port = self._update_port( [ 761.757793] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 761.757793] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] _ensure_no_port_binding_failure(port) [ 761.757793] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 761.757793] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] raise exception.PortBindingFailed(port_id=port['id']) [ 761.757793] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] nova.exception.PortBindingFailed: Binding failed for 
port 8b029d4d-780a-46e9-8f80-1cccecf4f293, please check neutron logs for more information. [ 761.757793] env[61852]: ERROR nova.compute.manager [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] [ 761.757793] env[61852]: DEBUG nova.compute.utils [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Binding failed for port 8b029d4d-780a-46e9-8f80-1cccecf4f293, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 761.758320] env[61852]: DEBUG oslo_concurrency.lockutils [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.568s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 761.758505] env[61852]: DEBUG oslo_concurrency.lockutils [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.760408] env[61852]: DEBUG oslo_concurrency.lockutils [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.107s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 761.761862] env[61852]: INFO nova.compute.claims [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 761.766036] env[61852]: DEBUG nova.compute.manager [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Build of instance bd549d69-403b-4c5c-9e08-0c84d32a7c0a was re-scheduled: Binding failed for port 8b029d4d-780a-46e9-8f80-1cccecf4f293, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 761.766036] env[61852]: DEBUG nova.compute.manager [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 761.766036] env[61852]: DEBUG oslo_concurrency.lockutils [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Acquiring lock "refresh_cache-bd549d69-403b-4c5c-9e08-0c84d32a7c0a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.766036] env[61852]: DEBUG oslo_concurrency.lockutils [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Acquired lock "refresh_cache-bd549d69-403b-4c5c-9e08-0c84d32a7c0a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.766330] env[61852]: DEBUG nova.network.neutron [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 761.779961] env[61852]: INFO nova.scheduler.client.report [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Deleted allocations for instance 694889e8-200e-454c-9e87-60521dd044d9 [ 761.857425] env[61852]: DEBUG nova.compute.manager [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 761.883586] env[61852]: DEBUG nova.virt.hardware [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=<?>,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-15T17:18:24Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 761.883940] env[61852]: DEBUG nova.virt.hardware [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 761.884175] env[61852]: DEBUG nova.virt.hardware [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 761.884431] env[61852]: DEBUG nova.virt.hardware [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 761.884651] env[61852]: DEBUG nova.virt.hardware [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 761.884870] env[61852]: DEBUG nova.virt.hardware [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 761.885167] env[61852]: DEBUG nova.virt.hardware [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 761.885420] env[61852]: DEBUG nova.virt.hardware [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies
/opt/stack/nova/nova/virt/hardware.py:471}} [ 761.885701] env[61852]: DEBUG nova.virt.hardware [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 761.885967] env[61852]: DEBUG nova.virt.hardware [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 761.886251] env[61852]: DEBUG nova.virt.hardware [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 761.887219] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-045e14d5-fd0a-4cbc-a83b-f1d081c35b0f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.890445] env[61852]: DEBUG nova.network.neutron [req-6b3f4378-7e71-43b9-9626-565d2a6bcf61 req-6d4ff9b3-2218-4a79-bfff-904c7ea7baf7 service nova] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 761.897963] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bf431df-d63d-4f84-9cb1-81588dc410f4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.914091] env[61852]: ERROR nova.compute.manager [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 12409f55-90b1-4ec0-9fae-fd4c1612154c, please check neutron logs for more information. 
[ 761.914091] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Traceback (most recent call last): [ 761.914091] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 761.914091] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] yield resources [ 761.914091] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 761.914091] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] self.driver.spawn(context, instance, image_meta, [ 761.914091] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 761.914091] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] self._vmops.spawn(context, instance, image_meta, injected_files, [ 761.914091] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 761.914091] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] vm_ref = self.build_virtual_machine(instance, [ 761.914091] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 761.914635] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] vif_infos = vmwarevif.get_vif_info(self._session, [ 761.914635] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 761.914635] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] for vif in network_info: [ 761.914635] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 761.914635] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] return self._sync_wrapper(fn, *args, **kwargs) [ 761.914635] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 761.914635] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] self.wait() [ 761.914635] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 761.914635] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] self[:] = self._gt.wait() [ 761.914635] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 761.914635] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] return self._exit_event.wait() [ 761.914635] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 761.914635] env[61852]: ERROR 
nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] current.throw(*self._exc) [ 761.915080] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 761.915080] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] result = function(*args, **kwargs) [ 761.915080] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 761.915080] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] return func(*args, **kwargs) [ 761.915080] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 761.915080] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] raise e [ 761.915080] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 761.915080] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] nwinfo = self.network_api.allocate_for_instance( [ 761.915080] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 761.915080] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] created_port_ids = self._update_ports_for_instance( [ 761.915080] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 761.915080] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] with excutils.save_and_reraise_exception(): [ 761.915080] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 761.915491] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] self.force_reraise() [ 761.915491] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 761.915491] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] raise self.value [ 761.915491] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 761.915491] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] updated_port = self._update_port( [ 761.915491] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 761.915491] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] _ensure_no_port_binding_failure(port) [ 761.915491] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
761.915491] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] raise exception.PortBindingFailed(port_id=port['id']) [ 761.915491] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] nova.exception.PortBindingFailed: Binding failed for port 12409f55-90b1-4ec0-9fae-fd4c1612154c, please check neutron logs for more information. [ 761.915491] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] [ 761.915491] env[61852]: INFO nova.compute.manager [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Terminating instance [ 761.917341] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Acquiring lock "refresh_cache-d6a46605-aa45-4de3-80a8-cb73b9980669" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.972433] env[61852]: DEBUG nova.network.neutron [req-6b3f4378-7e71-43b9-9626-565d2a6bcf61 req-6d4ff9b3-2218-4a79-bfff-904c7ea7baf7 service nova] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.126709] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquiring lock "988c0a5c-b84d-44cf-9068-defd7132b0c9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 762.126926] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lock "988c0a5c-b84d-44cf-9068-defd7132b0c9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 762.154181] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquiring lock "f48b40ab-23f2-4071-8168-e7e2411ad64d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 762.154181] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lock "f48b40ab-23f2-4071-8168-e7e2411ad64d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 762.284214] env[61852]: DEBUG nova.network.neutron [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 
tempest-ServersV294TestFqdnHostnames-634103038-project-member] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 762.288035] env[61852]: DEBUG oslo_concurrency.lockutils [None req-24d24eb3-912e-4833-9d99-eb89264ba41e tempest-ServerShowV257Test-2084054151 tempest-ServerShowV257Test-2084054151-project-member] Lock "694889e8-200e-454c-9e87-60521dd044d9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.788s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 762.360791] env[61852]: DEBUG nova.network.neutron [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.475781] env[61852]: DEBUG oslo_concurrency.lockutils [req-6b3f4378-7e71-43b9-9626-565d2a6bcf61 req-6d4ff9b3-2218-4a79-bfff-904c7ea7baf7 service nova] Releasing lock "refresh_cache-d6a46605-aa45-4de3-80a8-cb73b9980669" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 762.475887] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Acquired lock "refresh_cache-d6a46605-aa45-4de3-80a8-cb73b9980669" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.476083] env[61852]: DEBUG nova.network.neutron [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 762.844923] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 762.844923] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 762.864057] env[61852]: DEBUG oslo_concurrency.lockutils [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Releasing lock "refresh_cache-bd549d69-403b-4c5c-9e08-0c84d32a7c0a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 762.864285] env[61852]: DEBUG nova.compute.manager [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 762.864465] env[61852]: DEBUG nova.compute.manager [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 762.864739] env[61852]: DEBUG nova.network.neutron [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 762.881283] env[61852]: DEBUG nova.network.neutron [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 762.998436] env[61852]: DEBUG nova.network.neutron [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 763.093232] env[61852]: DEBUG nova.network.neutron [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.126035] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e86893c-f9d0-466a-b63e-2993146feb81 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.134208] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b65ed009-9c3e-447d-8a16-a92e363691a6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.165691] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93757678-bd8b-4548-ad81-844d213598f1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.172939] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93897feb-3909-46b5-a36e-24ae24f874b6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.187873] env[61852]: DEBUG nova.compute.provider_tree [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 763.356345] env[61852]: DEBUG oslo_service.periodic_task [None 
req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 763.356345] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Starting heal instance info cache {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 763.356345] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Rebuilding the list of instances to heal {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 763.385015] env[61852]: DEBUG nova.network.neutron [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.392614] env[61852]: DEBUG nova.compute.manager [req-6cac70a0-8b07-4c5b-8013-f4a55a23c302 req-d40dee22-af3b-4433-9acd-b5aeeb264e79 service nova] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Received event network-vif-deleted-12409f55-90b1-4ec0-9fae-fd4c1612154c {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 763.599034] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Releasing lock "refresh_cache-d6a46605-aa45-4de3-80a8-cb73b9980669" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 763.599034] env[61852]: DEBUG nova.compute.manager [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 763.599034] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 763.599034] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7b81d9c0-43da-4620-8357-7d4cf3b80149 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.607976] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faef666f-1e75-4cd2-8502-2db9eebb450e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.630236] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d6a46605-aa45-4de3-80a8-cb73b9980669 could not be found. 
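The WARNING above is the benign branch of the destroy path: the backend lookup found no VM for the instance, so vmops logs InstanceNotFound and carries on as if the destroy had already happened, which is why the next records report "Instance destroyed" and move straight to network deallocation. A minimal sketch of that teardown idiom, with illustrative names rather than nova's exact code:

```python
# Sketch only: during teardown, "already gone" counts as success.
import logging

LOG = logging.getLogger(__name__)

class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""

def destroy_vm(session, instance_uuid):
    # find_vm_by_uuid / destroy are hypothetical session helpers.
    try:
        vm_ref = session.find_vm_by_uuid(instance_uuid)
        session.destroy(vm_ref)
    except InstanceNotFound:
        # Log and fall through: the caller still deallocates networks
        # and releases the resource claim, as the surrounding records show.
        LOG.warning("Instance does not exist on backend: %s", instance_uuid)
```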
[ 763.630612] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 763.631089] env[61852]: INFO nova.compute.manager [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Took 0.03 seconds to destroy the instance on the hypervisor. [ 763.631382] env[61852]: DEBUG oslo.service.loopingcall [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 763.631606] env[61852]: DEBUG nova.compute.manager [-] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 763.631704] env[61852]: DEBUG nova.network.neutron [-] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 763.648818] env[61852]: DEBUG nova.network.neutron [-] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 763.692372] env[61852]: DEBUG nova.scheduler.client.report [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 763.861024] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Skipping network cache update for instance because it is Building. {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 763.861024] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Skipping network cache update for instance because it is Building. {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 763.861024] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Skipping network cache update for instance because it is Building. 
{{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 763.861024] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Skipping network cache update for instance because it is Building. {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 763.861024] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Skipping network cache update for instance because it is Building. {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 763.886945] env[61852]: INFO nova.compute.manager [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] [instance: bd549d69-403b-4c5c-9e08-0c84d32a7c0a] Took 1.02 seconds to deallocate network for instance. [ 763.894453] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "refresh_cache-be44214d-72dc-4517-a91a-7f659b5aa897" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.894453] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquired lock "refresh_cache-be44214d-72dc-4517-a91a-7f659b5aa897" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.894453] env[61852]: DEBUG nova.network.neutron [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Forcefully refreshing network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 763.894453] env[61852]: DEBUG nova.objects.instance [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lazy-loading 'info_cache' on Instance uuid be44214d-72dc-4517-a91a-7f659b5aa897 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 764.154146] env[61852]: DEBUG nova.network.neutron [-] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.197636] env[61852]: DEBUG oslo_concurrency.lockutils [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.436s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 764.197636] env[61852]: DEBUG nova.compute.manager [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 764.201028] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 27.935s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 764.657023] env[61852]: INFO nova.compute.manager [-] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Took 1.03 seconds to deallocate network for instance. [ 764.659395] env[61852]: DEBUG nova.compute.claims [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 764.659571] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.709126] env[61852]: DEBUG nova.compute.utils [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 764.723028] env[61852]: DEBUG nova.compute.manager [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 764.723028] env[61852]: DEBUG nova.network.neutron [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 764.794164] env[61852]: DEBUG nova.policy [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '22b0c1d5d7614e18af201563c41a64f8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b16f796a681641bcab2679adc24e753a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 764.926230] env[61852]: DEBUG nova.network.neutron [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 764.935940] env[61852]: INFO nova.scheduler.client.report [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Deleted allocations for instance bd549d69-403b-4c5c-9e08-0c84d32a7c0a [ 765.104555] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef48064-1555-4b1d-a3f4-929bb0cdf75a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.114140] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c1ac38b-e41c-46e6-aa7d-53b6ab94012e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.150493] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1af1ec05-afac-4c11-b3c9-b1eef3aab0bb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.156194] env[61852]: DEBUG nova.network.neutron [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Successfully created port: eca54373-1f16-4210-8551-85373b0ac57c {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 765.161342] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-374f9a96-614a-4703-9b05-acfc1b1cc8f8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.177434] env[61852]: DEBUG nova.compute.provider_tree [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 765.217589] env[61852]: DEBUG nova.compute.manager [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 765.449136] env[61852]: DEBUG oslo_concurrency.lockutils [None req-739a61a4-54aa-45ce-9d14-74f20f4c594b tempest-ServersV294TestFqdnHostnames-634103038 tempest-ServersV294TestFqdnHostnames-634103038-project-member] Lock "bd549d69-403b-4c5c-9e08-0c84d32a7c0a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 139.785s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.559212] env[61852]: DEBUG nova.network.neutron [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.678962] env[61852]: DEBUG nova.scheduler.client.report [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 765.952556] env[61852]: DEBUG nova.compute.manager [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 766.065692] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Releasing lock "refresh_cache-be44214d-72dc-4517-a91a-7f659b5aa897" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 766.065918] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Updated the network info_cache for instance {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 766.066130] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 766.066295] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 766.066440] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 766.066631] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 766.066720] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 766.066860] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 766.066988] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61852) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 766.067154] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 766.187074] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.985s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 766.187074] env[61852]: ERROR nova.compute.manager [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c3c54f6f-e903-4008-8261-667cea9cbc6e, please check neutron logs for more information. [ 766.187074] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Traceback (most recent call last): [ 766.187074] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 766.187074] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] self.driver.spawn(context, instance, image_meta, [ 766.187074] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 766.187074] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] self._vmops.spawn(context, instance, image_meta, injected_files, [ 766.187074] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 766.187074] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] vm_ref = self.build_virtual_machine(instance, [ 766.187379] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 766.187379] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] vif_infos = vmwarevif.get_vif_info(self._session, [ 766.187379] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 766.187379] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] for vif in network_info: [ 766.187379] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 766.187379] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] return self._sync_wrapper(fn, *args, **kwargs) [ 766.187379] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 766.187379] 
env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] self.wait() [ 766.187379] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 766.187379] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] self[:] = self._gt.wait() [ 766.187379] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 766.187379] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] return self._exit_event.wait() [ 766.187379] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 766.187742] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] current.throw(*self._exc) [ 766.187742] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 766.187742] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] result = function(*args, **kwargs) [ 766.187742] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 766.187742] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] return func(*args, **kwargs) [ 766.187742] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 766.187742] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] raise e [ 766.187742] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 766.187742] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] nwinfo = self.network_api.allocate_for_instance( [ 766.187742] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 766.187742] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] created_port_ids = self._update_ports_for_instance( [ 766.187742] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 766.187742] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] with excutils.save_and_reraise_exception(): [ 766.188140] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 766.188140] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] self.force_reraise() [ 766.188140] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 
766.188140] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] raise self.value [ 766.188140] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 766.188140] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] updated_port = self._update_port( [ 766.188140] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 766.188140] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] _ensure_no_port_binding_failure(port) [ 766.188140] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 766.188140] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] raise exception.PortBindingFailed(port_id=port['id']) [ 766.188140] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] nova.exception.PortBindingFailed: Binding failed for port c3c54f6f-e903-4008-8261-667cea9cbc6e, please check neutron logs for more information. [ 766.188140] env[61852]: ERROR nova.compute.manager [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] [ 766.188512] env[61852]: DEBUG nova.compute.utils [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Binding failed for port c3c54f6f-e903-4008-8261-667cea9cbc6e, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 766.189683] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.093s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.191388] env[61852]: INFO nova.compute.claims [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 766.193248] env[61852]: DEBUG nova.compute.manager [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Build of instance aae42775-cb43-4eee-967a-9ba0bdde7783 was re-scheduled: Binding failed for port c3c54f6f-e903-4008-8261-667cea9cbc6e, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 766.193894] env[61852]: DEBUG nova.compute.manager [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 766.194184] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Acquiring lock "refresh_cache-aae42775-cb43-4eee-967a-9ba0bdde7783" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.194379] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Acquired lock "refresh_cache-aae42775-cb43-4eee-967a-9ba0bdde7783" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.194609] env[61852]: DEBUG nova.network.neutron [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 766.227603] env[61852]: DEBUG nova.compute.manager [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 766.265139] env[61852]: DEBUG nova.virt.hardware [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 766.265580] env[61852]: DEBUG nova.virt.hardware [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 766.265580] env[61852]: DEBUG nova.virt.hardware [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 766.267244] env[61852]: DEBUG nova.virt.hardware [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 766.267244] env[61852]: DEBUG nova.virt.hardware [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 766.267244] env[61852]: DEBUG nova.virt.hardware [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 766.267244] env[61852]: DEBUG nova.virt.hardware [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 766.267244] env[61852]: DEBUG nova.virt.hardware [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 766.267886] env[61852]: DEBUG nova.virt.hardware [None 
req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 766.267886] env[61852]: DEBUG nova.virt.hardware [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 766.267886] env[61852]: DEBUG nova.virt.hardware [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 766.267886] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e3aaa79-a8e0-4a8c-9095-9c4df51d83b7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.276659] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-879c0499-2a11-41e6-af3d-ea3099de0d97 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.486593] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.573259] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.761157] env[61852]: DEBUG nova.network.neutron [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 766.793118] env[61852]: DEBUG nova.compute.manager [req-18b21a90-7a3b-4d3b-9057-218160179f96 req-99230e73-a517-4f3c-bfdd-56e9e4c8f875 service nova] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Received event network-changed-eca54373-1f16-4210-8551-85373b0ac57c {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 766.793118] env[61852]: DEBUG nova.compute.manager [req-18b21a90-7a3b-4d3b-9057-218160179f96 req-99230e73-a517-4f3c-bfdd-56e9e4c8f875 service nova] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Refreshing instance network info cache due to event network-changed-eca54373-1f16-4210-8551-85373b0ac57c. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 766.793118] env[61852]: DEBUG oslo_concurrency.lockutils [req-18b21a90-7a3b-4d3b-9057-218160179f96 req-99230e73-a517-4f3c-bfdd-56e9e4c8f875 service nova] Acquiring lock "refresh_cache-b0433331-f005-49e0-bd22-bc78f970e3cd" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.793118] env[61852]: DEBUG oslo_concurrency.lockutils [req-18b21a90-7a3b-4d3b-9057-218160179f96 req-99230e73-a517-4f3c-bfdd-56e9e4c8f875 service nova] Acquired lock "refresh_cache-b0433331-f005-49e0-bd22-bc78f970e3cd" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.793118] env[61852]: DEBUG nova.network.neutron [req-18b21a90-7a3b-4d3b-9057-218160179f96 req-99230e73-a517-4f3c-bfdd-56e9e4c8f875 service nova] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Refreshing network info cache for port eca54373-1f16-4210-8551-85373b0ac57c {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 766.893981] env[61852]: DEBUG nova.network.neutron [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.960361] env[61852]: ERROR nova.compute.manager [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port eca54373-1f16-4210-8551-85373b0ac57c, please check neutron logs for more information. 
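This "failed network setup" error, like both long tracebacks in this section, follows the same deferred-result shape: port allocation runs in an eventlet greenthread, and the failure only surfaces when the spawn path first iterates the network info (get_vif_info's `for vif in network_info:`), whose wait() re-raises the worker's exception, hence the `current.throw(*self._exc)` frames. A toy sketch of that pattern under eventlet, not nova's NetworkInfoAsyncWrapper itself:

```python
import eventlet

class AsyncResult:
    """Toy stand-in for the async network-info wrapper."""

    def __init__(self, fn, *args):
        self._gt = eventlet.spawn(fn, *args)

    def wait(self):
        # GreenThread.wait() returns the function's result or re-raises
        # its exception in the waiting greenthread.
        return self._gt.wait()

    def __iter__(self):
        return iter(self.wait())

def allocate(port_ok):
    if not port_ok:
        raise RuntimeError("Binding failed for port ...")
    return ["vif-1"]

nw_info = AsyncResult(allocate, False)
try:
    for vif in nw_info:  # mirrors vif.get_vif_info's iteration
        pass
except RuntimeError as exc:
    print("surfaced only at consumption time:", exc)
```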
[ 766.960361] env[61852]: ERROR nova.compute.manager Traceback (most recent call last): [ 766.960361] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 766.960361] env[61852]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 766.960361] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 766.960361] env[61852]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 766.960361] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 766.960361] env[61852]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 766.960361] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 766.960361] env[61852]: ERROR nova.compute.manager self.force_reraise() [ 766.960361] env[61852]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 766.960361] env[61852]: ERROR nova.compute.manager raise self.value [ 766.960361] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 766.960361] env[61852]: ERROR nova.compute.manager updated_port = self._update_port( [ 766.960361] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 766.960361] env[61852]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 766.961109] env[61852]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 766.961109] env[61852]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 766.961109] env[61852]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port eca54373-1f16-4210-8551-85373b0ac57c, please check neutron logs for more information. 
[ 766.961109] env[61852]: ERROR nova.compute.manager [ 766.961109] env[61852]: Traceback (most recent call last): [ 766.961109] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 766.961109] env[61852]: listener.cb(fileno) [ 766.961109] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 766.961109] env[61852]: result = function(*args, **kwargs) [ 766.961109] env[61852]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 766.961109] env[61852]: return func(*args, **kwargs) [ 766.961109] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 766.961109] env[61852]: raise e [ 766.961109] env[61852]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 766.961109] env[61852]: nwinfo = self.network_api.allocate_for_instance( [ 766.961109] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 766.961109] env[61852]: created_port_ids = self._update_ports_for_instance( [ 766.961109] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 766.961109] env[61852]: with excutils.save_and_reraise_exception(): [ 766.961109] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 766.961109] env[61852]: self.force_reraise() [ 766.961109] env[61852]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 766.961109] env[61852]: raise self.value [ 766.961109] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 766.961109] env[61852]: updated_port = self._update_port( [ 766.961109] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 766.961109] env[61852]: _ensure_no_port_binding_failure(port) [ 766.961109] env[61852]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 766.961109] env[61852]: raise exception.PortBindingFailed(port_id=port['id']) [ 766.962063] env[61852]: nova.exception.PortBindingFailed: Binding failed for port eca54373-1f16-4210-8551-85373b0ac57c, please check neutron logs for more information. [ 766.962063] env[61852]: Removing descriptor: 19 [ 766.962063] env[61852]: ERROR nova.compute.manager [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port eca54373-1f16-4210-8551-85373b0ac57c, please check neutron logs for more information. 
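Each traceback also passes through oslo.utils' save_and_reraise_exception; its `__exit__` / `force_reraise` / `raise self.value` frames belong to a context manager that lets cleanup run without swallowing the original PortBindingFailed. A minimal usage sketch, where `_update_port` and `_teardown_port` are hypothetical placeholders for the calls seen above:

```python
from oslo_utils import excutils

def _update_port(port):
    # Placeholder for the neutron port update that fails above.
    raise RuntimeError("Binding failed for port %s" % port["id"])

def _teardown_port(port):
    # Placeholder cleanup, e.g. deleting the half-created port.
    pass

def update_port_with_cleanup(port):
    try:
        return _update_port(port)
    except Exception:
        with excutils.save_and_reraise_exception():
            # Runs between the original raise and the re-raise; even if
            # this block raises too, force_reraise() restores the saved
            # exception, the `raise self.value` frame in the log.
            _teardown_port(port)
```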
[ 766.962063] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Traceback (most recent call last): [ 766.962063] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 766.962063] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] yield resources [ 766.962063] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 766.962063] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] self.driver.spawn(context, instance, image_meta, [ 766.962063] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 766.962063] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 766.962063] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 766.962063] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] vm_ref = self.build_virtual_machine(instance, [ 766.962492] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 766.962492] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] vif_infos = vmwarevif.get_vif_info(self._session, [ 766.962492] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 766.962492] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] for vif in network_info: [ 766.962492] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 766.962492] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] return self._sync_wrapper(fn, *args, **kwargs) [ 766.962492] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 766.962492] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] self.wait() [ 766.962492] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 766.962492] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] self[:] = self._gt.wait() [ 766.962492] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 766.962492] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] return self._exit_event.wait() [ 766.962492] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 766.962922] env[61852]: ERROR 
nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] result = hub.switch() [ 766.962922] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 766.962922] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] return self.greenlet.switch() [ 766.962922] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 766.962922] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] result = function(*args, **kwargs) [ 766.962922] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 766.962922] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] return func(*args, **kwargs) [ 766.962922] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 766.962922] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] raise e [ 766.962922] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 766.962922] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] nwinfo = self.network_api.allocate_for_instance( [ 766.962922] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 766.962922] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] created_port_ids = self._update_ports_for_instance( [ 766.963389] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 766.963389] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] with excutils.save_and_reraise_exception(): [ 766.963389] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 766.963389] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] self.force_reraise() [ 766.963389] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 766.963389] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] raise self.value [ 766.963389] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 766.963389] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] updated_port = self._update_port( [ 766.963389] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 766.963389] 
env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] _ensure_no_port_binding_failure(port) [ 766.963389] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 766.963389] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] raise exception.PortBindingFailed(port_id=port['id']) [ 766.963786] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] nova.exception.PortBindingFailed: Binding failed for port eca54373-1f16-4210-8551-85373b0ac57c, please check neutron logs for more information. [ 766.963786] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] [ 766.963786] env[61852]: INFO nova.compute.manager [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Terminating instance [ 766.964073] env[61852]: DEBUG oslo_concurrency.lockutils [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Acquiring lock "refresh_cache-b0433331-f005-49e0-bd22-bc78f970e3cd" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 767.325720] env[61852]: DEBUG nova.network.neutron [req-18b21a90-7a3b-4d3b-9057-218160179f96 req-99230e73-a517-4f3c-bfdd-56e9e4c8f875 service nova] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 767.402367] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Releasing lock "refresh_cache-aae42775-cb43-4eee-967a-9ba0bdde7783" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 767.402612] env[61852]: DEBUG nova.compute.manager [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 767.402792] env[61852]: DEBUG nova.compute.manager [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 767.402963] env[61852]: DEBUG nova.network.neutron [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 767.415613] env[61852]: DEBUG nova.network.neutron [req-18b21a90-7a3b-4d3b-9057-218160179f96 req-99230e73-a517-4f3c-bfdd-56e9e4c8f875 service nova] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.449296] env[61852]: DEBUG nova.network.neutron [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 767.614790] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db5fe4c8-735d-463b-b567-fe321575bdf5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.626526] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b2f9a0e-e543-41d5-9f90-41f19432b6dd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.660993] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b8130e1-00d0-4ad9-bb51-b4be52320f70 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.668751] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a43e426b-a195-49d0-8fd5-7daec87ef323 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.681944] env[61852]: DEBUG nova.compute.provider_tree [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 767.918816] env[61852]: DEBUG oslo_concurrency.lockutils [req-18b21a90-7a3b-4d3b-9057-218160179f96 req-99230e73-a517-4f3c-bfdd-56e9e4c8f875 service nova] Releasing lock "refresh_cache-b0433331-f005-49e0-bd22-bc78f970e3cd" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 767.919268] env[61852]: DEBUG oslo_concurrency.lockutils [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Acquired lock 
"refresh_cache-b0433331-f005-49e0-bd22-bc78f970e3cd" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.919455] env[61852]: DEBUG nova.network.neutron [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 767.951085] env[61852]: DEBUG nova.network.neutron [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.185093] env[61852]: DEBUG nova.scheduler.client.report [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 768.442356] env[61852]: DEBUG nova.network.neutron [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 768.458594] env[61852]: INFO nova.compute.manager [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: aae42775-cb43-4eee-967a-9ba0bdde7783] Took 1.06 seconds to deallocate network for instance. [ 768.542142] env[61852]: DEBUG nova.network.neutron [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.697018] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.505s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 768.697018] env[61852]: DEBUG nova.compute.manager [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 768.697819] env[61852]: DEBUG oslo_concurrency.lockutils [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 27.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.045967] env[61852]: DEBUG oslo_concurrency.lockutils [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Releasing lock "refresh_cache-b0433331-f005-49e0-bd22-bc78f970e3cd" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 769.046432] env[61852]: DEBUG nova.compute.manager [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 769.046614] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 769.046921] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-81149952-16ac-4970-ba0b-5f485a1680a7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.059899] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64c7e0ec-596d-4f61-bfe8-c6030b04eb4e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.074650] env[61852]: DEBUG nova.compute.manager [req-2391b2cb-de72-4f30-8af0-e00661fac9a3 req-61bac055-7577-4eff-9c67-da8cff091f4e service nova] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Received event network-vif-deleted-eca54373-1f16-4210-8551-85373b0ac57c {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 769.090352] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b0433331-f005-49e0-bd22-bc78f970e3cd could not be found. [ 769.090352] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 769.090352] env[61852]: INFO nova.compute.manager [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Took 0.04 seconds to destroy the instance on the hypervisor. 
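The "Acquiring lock ... / acquired ... :: waited / "released" ... :: held" records throughout this log come from oslo.concurrency's lockutils, which reports how long each caller waited for and held a named lock. A short sketch of both forms of that API follows, assuming oslo.concurrency is installed; 'compute_resources' and the "refresh_cache-<uuid>" names are the real lock names seen above, while the function bodies are illustrative placeholders rather than Nova's code.

from oslo_concurrency import lockutils

# Decorator form, as used for the "compute_resources" lock the resource
# tracker holds around claims in the records above:
@lockutils.synchronized('compute_resources')
def instance_claim():
    return 'claimed'  # resource accounting would happen under the lock

# Context-manager form, matching the "refresh_cache-<uuid>" locks that guard
# each instance's network info cache:
def refresh_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        return []  # placeholder for the rebuilt network_info

instance_claim()
refresh_cache('b0433331-f005-49e0-bd22-bc78f970e3cd')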
[ 769.090352] env[61852]: DEBUG oslo.service.loopingcall [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 769.090352] env[61852]: DEBUG nova.compute.manager [-] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 769.090352] env[61852]: DEBUG nova.network.neutron [-] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 769.133205] env[61852]: DEBUG nova.network.neutron [-] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 769.204502] env[61852]: DEBUG nova.compute.utils [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 769.210444] env[61852]: DEBUG nova.compute.manager [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 769.210444] env[61852]: DEBUG nova.network.neutron [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 769.289228] env[61852]: DEBUG nova.policy [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '22b0c1d5d7614e18af201563c41a64f8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b16f796a681641bcab2679adc24e753a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 769.494950] env[61852]: INFO nova.scheduler.client.report [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Deleted allocations for instance aae42775-cb43-4eee-967a-9ba0bdde7783 [ 769.635961] env[61852]: DEBUG nova.network.neutron [-] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.642690] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with
opID=oslo.vmware-8edcbf66-ad79-41d8-86eb-0b301b9390c1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.654348] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f83dd617-f313-4d36-8f5c-6201c703c68d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.723186] env[61852]: DEBUG nova.compute.manager [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 769.729946] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-290ad954-aff4-4d23-86b7-5fbc3a100cc4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.740036] env[61852]: DEBUG nova.network.neutron [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Successfully created port: 916e36f2-cfed-41bf-bad3-33d92e9ef290 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 769.744208] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Acquiring lock "aeaa2828-6d83-4b26-bd1c-5f654c70713f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.744534] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Lock "aeaa2828-6d83-4b26-bd1c-5f654c70713f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.752558] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e34b3d3d-2b03-47a3-b803-0194b4692f11 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.777978] env[61852]: DEBUG nova.compute.provider_tree [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 770.007648] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1d77ef1c-ec11-45c9-9b41-e9af77c3f17e tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Lock "aae42775-cb43-4eee-967a-9ba0bdde7783" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 140.282s {{(pid=61852) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 770.148146] env[61852]: INFO nova.compute.manager [-] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Took 1.06 seconds to deallocate network for instance. [ 770.150990] env[61852]: DEBUG nova.compute.claims [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Aborting claim: {{(pid=61852) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 770.151215] env[61852]: DEBUG oslo_concurrency.lockutils [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.281849] env[61852]: DEBUG nova.scheduler.client.report [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 770.511190] env[61852]: DEBUG nova.compute.manager [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 770.738293] env[61852]: DEBUG nova.compute.manager [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 770.770498] env[61852]: DEBUG nova.virt.hardware [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 770.770777] env[61852]: DEBUG nova.virt.hardware [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 770.770934] env[61852]: DEBUG nova.virt.hardware [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 770.771267] env[61852]: DEBUG nova.virt.hardware [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 770.771431] env[61852]: DEBUG nova.virt.hardware [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 770.771596] env[61852]: DEBUG nova.virt.hardware [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 770.771901] env[61852]: DEBUG nova.virt.hardware [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 770.771943] env[61852]: DEBUG nova.virt.hardware [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 770.772554] env[61852]: DEBUG nova.virt.hardware [None 
req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 770.772767] env[61852]: DEBUG nova.virt.hardware [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 770.772963] env[61852]: DEBUG nova.virt.hardware [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 770.773935] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaa695a4-16e3-4aec-acac-1fb597025038 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.787090] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b73a070a-b2db-4c25-9d2e-ccdaa5a0d337 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.792973] env[61852]: DEBUG oslo_concurrency.lockutils [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.095s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 770.793603] env[61852]: ERROR nova.compute.manager [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 14fb3d6f-2aea-4010-9c16-2afe3df02850, please check neutron logs for more information. 
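The nova.virt.hardware records above enumerate every (sockets, cores, threads) split of the flavor's single vCPU under the reported 65536/65536/65536 limits, which is why exactly one topology comes back before the log continues with the failed build below. A simplified re-derivation of that enumeration follows; it is illustrative only, not Nova's exact hardware.py implementation.

# Enumerate (sockets, cores, threads) triples whose product equals the
# flavor's vcpus, subject to the per-dimension maxima the log reports.
def possible_cpu_topologies(vcpus, max_sockets, max_cores, max_threads):
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                yield (sockets, cores, threads)

print(list(possible_cpu_topologies(1, 65536, 65536, 65536)))
# -> [(1, 1, 1)], matching "Got 1 possible topologies" and the single
#    VirtCPUTopology(cores=1,sockets=1,threads=1) in the records above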
[ 770.793603] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Traceback (most recent call last): [ 770.793603] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 770.793603] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] self.driver.spawn(context, instance, image_meta, [ 770.793603] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 770.793603] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 770.793603] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 770.793603] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] vm_ref = self.build_virtual_machine(instance, [ 770.793603] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 770.793603] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] vif_infos = vmwarevif.get_vif_info(self._session, [ 770.793603] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 770.794353] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] for vif in network_info: [ 770.794353] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 770.794353] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] return self._sync_wrapper(fn, *args, **kwargs) [ 770.794353] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 770.794353] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] self.wait() [ 770.794353] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 770.794353] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] self[:] = self._gt.wait() [ 770.794353] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 770.794353] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] return self._exit_event.wait() [ 770.794353] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 770.794353] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] current.throw(*self._exc) [ 770.794353] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
770.794353] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] result = function(*args, **kwargs) [ 770.794940] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 770.794940] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] return func(*args, **kwargs) [ 770.794940] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 770.794940] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] raise e [ 770.794940] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 770.794940] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] nwinfo = self.network_api.allocate_for_instance( [ 770.794940] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 770.794940] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] created_port_ids = self._update_ports_for_instance( [ 770.794940] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 770.794940] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] with excutils.save_and_reraise_exception(): [ 770.794940] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 770.794940] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] self.force_reraise() [ 770.794940] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 770.796662] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] raise self.value [ 770.796662] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 770.796662] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] updated_port = self._update_port( [ 770.796662] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 770.796662] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] _ensure_no_port_binding_failure(port) [ 770.796662] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 770.796662] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] raise exception.PortBindingFailed(port_id=port['id']) [ 770.796662] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] nova.exception.PortBindingFailed: Binding failed for 
port 14fb3d6f-2aea-4010-9c16-2afe3df02850, please check neutron logs for more information. [ 770.796662] env[61852]: ERROR nova.compute.manager [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] [ 770.796662] env[61852]: DEBUG nova.compute.utils [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Binding failed for port 14fb3d6f-2aea-4010-9c16-2afe3df02850, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 770.797233] env[61852]: DEBUG oslo_concurrency.lockutils [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 26.112s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 770.803984] env[61852]: DEBUG nova.compute.manager [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Build of instance b0e0fcf9-1630-49aa-b053-5498245313b0 was re-scheduled: Binding failed for port 14fb3d6f-2aea-4010-9c16-2afe3df02850, please check neutron logs for more information. {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 770.803984] env[61852]: DEBUG nova.compute.manager [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 770.803984] env[61852]: DEBUG oslo_concurrency.lockutils [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Acquiring lock "refresh_cache-b0e0fcf9-1630-49aa-b053-5498245313b0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 770.803984] env[61852]: DEBUG oslo_concurrency.lockutils [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Acquired lock "refresh_cache-b0e0fcf9-1630-49aa-b053-5498245313b0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.804558] env[61852]: DEBUG nova.network.neutron [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 771.034725] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 771.145877] env[61852]: DEBUG oslo_concurrency.lockutils [None 
req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Acquiring lock "21d74604-6a64-44ee-a012-ebff7166853e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 771.145877] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Lock "21d74604-6a64-44ee-a012-ebff7166853e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 771.336720] env[61852]: DEBUG nova.network.neutron [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 771.445298] env[61852]: DEBUG nova.network.neutron [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.748572] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a769902-e74f-4d97-aafc-0c39072f15f9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.756087] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51a175b0-dd7d-4a58-93be-28d75829fd8e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.793958] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c787de4a-9cab-472d-a89b-fe4800529a2f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.803183] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2955e5c7-11c9-4e8b-8b9b-4533dafe50d9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.817296] env[61852]: DEBUG nova.compute.provider_tree [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 771.947925] env[61852]: DEBUG oslo_concurrency.lockutils [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Releasing lock "refresh_cache-b0e0fcf9-1630-49aa-b053-5498245313b0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 771.948252] env[61852]: DEBUG nova.compute.manager [None
req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 771.948410] env[61852]: DEBUG nova.compute.manager [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 771.948585] env[61852]: DEBUG nova.network.neutron [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 771.952806] env[61852]: DEBUG nova.compute.manager [req-13a2662c-b715-4957-86e6-0d4577f73d02 req-175c105d-2353-456f-a1e6-342c690b3465 service nova] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Received event network-vif-plugged-916e36f2-cfed-41bf-bad3-33d92e9ef290 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 771.953544] env[61852]: DEBUG oslo_concurrency.lockutils [req-13a2662c-b715-4957-86e6-0d4577f73d02 req-175c105d-2353-456f-a1e6-342c690b3465 service nova] Acquiring lock "d48cefda-0b05-4ec0-8c1d-bc25cd491faf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 771.953611] env[61852]: DEBUG oslo_concurrency.lockutils [req-13a2662c-b715-4957-86e6-0d4577f73d02 req-175c105d-2353-456f-a1e6-342c690b3465 service nova] Lock "d48cefda-0b05-4ec0-8c1d-bc25cd491faf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 771.953776] env[61852]: DEBUG oslo_concurrency.lockutils [req-13a2662c-b715-4957-86e6-0d4577f73d02 req-175c105d-2353-456f-a1e6-342c690b3465 service nova] Lock "d48cefda-0b05-4ec0-8c1d-bc25cd491faf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 771.953875] env[61852]: DEBUG nova.compute.manager [req-13a2662c-b715-4957-86e6-0d4577f73d02 req-175c105d-2353-456f-a1e6-342c690b3465 service nova] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] No waiting events found dispatching network-vif-plugged-916e36f2-cfed-41bf-bad3-33d92e9ef290 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 771.954039] env[61852]: WARNING nova.compute.manager [req-13a2662c-b715-4957-86e6-0d4577f73d02 req-175c105d-2353-456f-a1e6-342c690b3465 service nova] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Received unexpected event network-vif-plugged-916e36f2-cfed-41bf-bad3-33d92e9ef290 for instance with vm_state building and task_state spawning.
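The records above show the external-event handshake: Neutron's service user reports network-vif-plugged while the instance is still building, no registered waiter is found under the per-instance "-events" lock, and Nova logs the event as unexpected. A toy model of that prepare/pop handshake follows; the class and method names are illustrative, not Nova's actual InstanceEvents implementation.

import threading

class InstanceEvents:
    def __init__(self):
        self._events = {}            # (instance_uuid, event_name) -> Event
        self._lock = threading.Lock()

    def prepare(self, instance_uuid, event_name):
        # The spawning thread registers a waiter before plugging the VIF.
        waiter = threading.Event()
        with self._lock:
            self._events[(instance_uuid, event_name)] = waiter
        return waiter

    def pop(self, instance_uuid, event_name):
        # The external-event handler pops the waiter, if one exists.
        with self._lock:
            return self._events.pop((instance_uuid, event_name), None)

events = InstanceEvents()
waiter = events.pop('d48cefda-0b05-4ec0-8c1d-bc25cd491faf',
                    'network-vif-plugged-916e36f2-cfed-41bf-bad3-33d92e9ef290')
if waiter is None:
    print('unexpected event: no waiter registered yet')  # the WARNING case above
else:
    waiter.set()  # would unblock the thread spawning the instance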
[ 771.964376] env[61852]: DEBUG nova.network.neutron [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Successfully updated port: 916e36f2-cfed-41bf-bad3-33d92e9ef290 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 772.016408] env[61852]: DEBUG nova.network.neutron [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 772.320867] env[61852]: DEBUG nova.scheduler.client.report [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 772.467329] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Acquiring lock "refresh_cache-d48cefda-0b05-4ec0-8c1d-bc25cd491faf" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 772.467562] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Acquired lock "refresh_cache-d48cefda-0b05-4ec0-8c1d-bc25cd491faf" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.467627] env[61852]: DEBUG nova.network.neutron [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 772.475744] env[61852]: DEBUG nova.network.neutron [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.476425] env[61852]: DEBUG oslo_concurrency.lockutils [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquiring lock "8d8679db-eb9d-45c1-b053-70378f58e273" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 772.476707] env[61852]: DEBUG oslo_concurrency.lockutils [None
req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lock "8d8679db-eb9d-45c1-b053-70378f58e273" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 772.828521] env[61852]: DEBUG oslo_concurrency.lockutils [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.033s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 772.829849] env[61852]: ERROR nova.compute.manager [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port cf33027c-9160-4caf-a467-36d5407375d0, please check neutron logs for more information. [ 772.829849] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Traceback (most recent call last): [ 772.829849] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 772.829849] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] self.driver.spawn(context, instance, image_meta, [ 772.829849] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 772.829849] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 772.829849] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 772.829849] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] vm_ref = self.build_virtual_machine(instance, [ 772.829849] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 772.829849] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] vif_infos = vmwarevif.get_vif_info(self._session, [ 772.829849] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 772.830165] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] for vif in network_info: [ 772.830165] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 772.830165] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] return self._sync_wrapper(fn, *args, **kwargs) [ 772.830165] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/network/model.py", line 603, in
_sync_wrapper [ 772.830165] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] self.wait() [ 772.830165] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 772.830165] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] self[:] = self._gt.wait() [ 772.830165] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 772.830165] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] return self._exit_event.wait() [ 772.830165] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 772.830165] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] current.throw(*self._exc) [ 772.830165] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 772.830165] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] result = function(*args, **kwargs) [ 772.830470] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 772.830470] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] return func(*args, **kwargs) [ 772.830470] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 772.830470] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] raise e [ 772.830470] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 772.830470] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] nwinfo = self.network_api.allocate_for_instance( [ 772.830470] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 772.830470] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] created_port_ids = self._update_ports_for_instance( [ 772.830470] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 772.830470] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] with excutils.save_and_reraise_exception(): [ 772.830470] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 772.830470] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] self.force_reraise() [ 772.830470] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, 
in force_reraise [ 772.831864] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] raise self.value [ 772.831864] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 772.831864] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] updated_port = self._update_port( [ 772.831864] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 772.831864] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] _ensure_no_port_binding_failure(port) [ 772.831864] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 772.831864] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] raise exception.PortBindingFailed(port_id=port['id']) [ 772.831864] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] nova.exception.PortBindingFailed: Binding failed for port cf33027c-9160-4caf-a467-36d5407375d0, please check neutron logs for more information. [ 772.831864] env[61852]: ERROR nova.compute.manager [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] [ 772.831864] env[61852]: DEBUG nova.compute.utils [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Binding failed for port cf33027c-9160-4caf-a467-36d5407375d0, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 772.832123] env[61852]: DEBUG nova.compute.manager [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Build of instance e795b0f0-2c9c-4f44-9058-fbe706873d5a was re-scheduled: Binding failed for port cf33027c-9160-4caf-a467-36d5407375d0, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 772.832123] env[61852]: DEBUG nova.compute.manager [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 772.832123] env[61852]: DEBUG oslo_concurrency.lockutils [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Acquiring lock "refresh_cache-e795b0f0-2c9c-4f44-9058-fbe706873d5a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 772.832267] env[61852]: DEBUG oslo_concurrency.lockutils [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Acquired lock "refresh_cache-e795b0f0-2c9c-4f44-9058-fbe706873d5a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.832682] env[61852]: DEBUG nova.network.neutron [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 772.833531] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.345s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 772.835714] env[61852]: INFO nova.compute.claims [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 772.978404] env[61852]: INFO nova.compute.manager [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] [instance: b0e0fcf9-1630-49aa-b053-5498245313b0] Took 1.03 seconds to deallocate network for instance. [ 773.007404] env[61852]: DEBUG nova.network.neutron [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 773.189134] env[61852]: DEBUG nova.network.neutron [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Updating instance_info_cache with network_info: [{"id": "916e36f2-cfed-41bf-bad3-33d92e9ef290", "address": "fa:16:3e:04:ee:0a", "network": {"id": "b7574815-b3d4-4324-807a-f24ce8e2d818", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-448141922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b16f796a681641bcab2679adc24e753a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap916e36f2-cf", "ovs_interfaceid": "916e36f2-cfed-41bf-bad3-33d92e9ef290", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.358046] env[61852]: DEBUG nova.network.neutron [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 773.434465] env[61852]: DEBUG nova.network.neutron [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.691731] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Releasing lock "refresh_cache-d48cefda-0b05-4ec0-8c1d-bc25cd491faf" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 773.692081] env[61852]: DEBUG nova.compute.manager [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Instance network_info: |[{"id": "916e36f2-cfed-41bf-bad3-33d92e9ef290", "address": "fa:16:3e:04:ee:0a", "network": {"id": "b7574815-b3d4-4324-807a-f24ce8e2d818", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-448141922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b16f796a681641bcab2679adc24e753a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap916e36f2-cf", "ovs_interfaceid": "916e36f2-cfed-41bf-bad3-33d92e9ef290", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 773.692626] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:04:ee:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '916e36f2-cfed-41bf-bad3-33d92e9ef290', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 773.700799] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Creating folder: Project (b16f796a681641bcab2679adc24e753a). Parent ref: group-v277280. 
{{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 773.700799] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ac21a379-f541-49f4-bd9a-4b09b2a7a4a4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.711921] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Created folder: Project (b16f796a681641bcab2679adc24e753a) in parent group-v277280. [ 773.712128] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Creating folder: Instances. Parent ref: group-v277301. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 773.712384] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-77715a64-a920-494c-8287-8efb1335b6f0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.720952] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Created folder: Instances in parent group-v277301. [ 773.721187] env[61852]: DEBUG oslo.service.loopingcall [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 773.721364] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 773.721555] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c9c25c15-f827-4181-89fe-ee90652823a8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.739084] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 773.739084] env[61852]: value = "task-1292751" [ 773.739084] env[61852]: _type = "Task" [ 773.739084] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.749730] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292751, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.937326] env[61852]: DEBUG oslo_concurrency.lockutils [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Releasing lock "refresh_cache-e795b0f0-2c9c-4f44-9058-fbe706873d5a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 773.937560] env[61852]: DEBUG nova.compute.manager [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 773.937737] env[61852]: DEBUG nova.compute.manager [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 773.937901] env[61852]: DEBUG nova.network.neutron [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 773.957575] env[61852]: DEBUG nova.network.neutron [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 773.982151] env[61852]: DEBUG nova.compute.manager [req-6382dca7-855d-4edb-a457-282c9b1b9a38 req-e09e612d-0bdc-4d7e-99ba-c335f907e672 service nova] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Received event network-changed-916e36f2-cfed-41bf-bad3-33d92e9ef290 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 773.982151] env[61852]: DEBUG nova.compute.manager [req-6382dca7-855d-4edb-a457-282c9b1b9a38 req-e09e612d-0bdc-4d7e-99ba-c335f907e672 service nova] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Refreshing instance network info cache due to event network-changed-916e36f2-cfed-41bf-bad3-33d92e9ef290. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 773.982151] env[61852]: DEBUG oslo_concurrency.lockutils [req-6382dca7-855d-4edb-a457-282c9b1b9a38 req-e09e612d-0bdc-4d7e-99ba-c335f907e672 service nova] Acquiring lock "refresh_cache-d48cefda-0b05-4ec0-8c1d-bc25cd491faf" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 773.982151] env[61852]: DEBUG oslo_concurrency.lockutils [req-6382dca7-855d-4edb-a457-282c9b1b9a38 req-e09e612d-0bdc-4d7e-99ba-c335f907e672 service nova] Acquired lock "refresh_cache-d48cefda-0b05-4ec0-8c1d-bc25cd491faf" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.982283] env[61852]: DEBUG nova.network.neutron [req-6382dca7-855d-4edb-a457-282c9b1b9a38 req-e09e612d-0bdc-4d7e-99ba-c335f907e672 service nova] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Refreshing network info cache for port 916e36f2-cfed-41bf-bad3-33d92e9ef290 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 774.013191] env[61852]: INFO nova.scheduler.client.report [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Deleted allocations for instance b0e0fcf9-1630-49aa-b053-5498245313b0 [ 774.191932] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67189094-cc58-49eb-b10b-57542a1c29d9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.199503] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba10ba1f-021a-4af3-b9d6-758c3018a692 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.228654] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69c56b0f-ac0b-4922-aed1-b962b49c8e45 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.235624] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9e197d9-f5c4-41e2-aedc-fb85d14b4690 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.252911] env[61852]: DEBUG nova.compute.provider_tree [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 774.256579] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292751, 'name': CreateVM_Task, 'duration_secs': 0.323471} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.256917] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 774.263722] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 774.263884] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.264206] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 774.264641] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c18f78d6-febb-4235-9bd7-b11389dee2b2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.268644] env[61852]: DEBUG oslo_vmware.api [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Waiting for the task: (returnval){ [ 774.268644] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52b80b8c-f09a-ca37-6e46-6871c7dafd98" [ 774.268644] env[61852]: _type = "Task" [ 774.268644] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.276055] env[61852]: DEBUG oslo_vmware.api [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52b80b8c-f09a-ca37-6e46-6871c7dafd98, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.460621] env[61852]: DEBUG nova.network.neutron [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.525204] env[61852]: DEBUG oslo_concurrency.lockutils [None req-f0ee8bdb-4a14-4429-86cd-b13b5f37f13f tempest-ServerGroupTestJSON-1326386899 tempest-ServerGroupTestJSON-1326386899-project-member] Lock "b0e0fcf9-1630-49aa-b053-5498245313b0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 123.820s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 774.676821] env[61852]: DEBUG nova.network.neutron [req-6382dca7-855d-4edb-a457-282c9b1b9a38 req-e09e612d-0bdc-4d7e-99ba-c335f907e672 service nova] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Updated VIF entry in instance network info cache for port 916e36f2-cfed-41bf-bad3-33d92e9ef290. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 774.677185] env[61852]: DEBUG nova.network.neutron [req-6382dca7-855d-4edb-a457-282c9b1b9a38 req-e09e612d-0bdc-4d7e-99ba-c335f907e672 service nova] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Updating instance_info_cache with network_info: [{"id": "916e36f2-cfed-41bf-bad3-33d92e9ef290", "address": "fa:16:3e:04:ee:0a", "network": {"id": "b7574815-b3d4-4324-807a-f24ce8e2d818", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-448141922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b16f796a681641bcab2679adc24e753a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap916e36f2-cf", "ovs_interfaceid": "916e36f2-cfed-41bf-bad3-33d92e9ef290", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.759049] env[61852]: DEBUG nova.scheduler.client.report [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 774.778605] env[61852]: DEBUG oslo_vmware.api [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52b80b8c-f09a-ca37-6e46-6871c7dafd98, 'name': SearchDatastore_Task, 'duration_secs': 0.009016} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.779443] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 774.779674] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 774.779899] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 774.780052] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.780230] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 774.780712] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-58e0a709-3ca0-47b1-ae57-65e4daef76fc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.788488] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 774.788672] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 774.789347] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb8b8417-f57e-486c-a966-51189f7ca296 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.794094] env[61852]: DEBUG oslo_vmware.api [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Waiting for the task: (returnval){ [ 774.794094] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52392538-0476-b72b-f584-f8060ced0ee6" [ 774.794094] env[61852]: _type = "Task" [ 774.794094] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.801469] env[61852]: DEBUG oslo_vmware.api [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52392538-0476-b72b-f584-f8060ced0ee6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.965072] env[61852]: INFO nova.compute.manager [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] [instance: e795b0f0-2c9c-4f44-9058-fbe706873d5a] Took 1.03 seconds to deallocate network for instance. [ 775.027636] env[61852]: DEBUG nova.compute.manager [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 775.179874] env[61852]: DEBUG oslo_concurrency.lockutils [req-6382dca7-855d-4edb-a457-282c9b1b9a38 req-e09e612d-0bdc-4d7e-99ba-c335f907e672 service nova] Releasing lock "refresh_cache-d48cefda-0b05-4ec0-8c1d-bc25cd491faf" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 775.263929] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.430s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 775.264437] env[61852]: DEBUG nova.compute.manager [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 775.267184] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 18.219s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 775.267551] env[61852]: DEBUG nova.objects.instance [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61852) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 775.310492] env[61852]: DEBUG oslo_vmware.api [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52392538-0476-b72b-f584-f8060ced0ee6, 'name': SearchDatastore_Task, 'duration_secs': 0.007203} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.311582] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2822401-d6c4-42a2-b5cf-73ce598bc618 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.317177] env[61852]: DEBUG oslo_vmware.api [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Waiting for the task: (returnval){ [ 775.317177] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52b6195a-89a7-5f84-3824-6c13dc765334" [ 775.317177] env[61852]: _type = "Task" [ 775.317177] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.326365] env[61852]: DEBUG oslo_vmware.api [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52b6195a-89a7-5f84-3824-6c13dc765334, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.550255] env[61852]: DEBUG oslo_concurrency.lockutils [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 775.772648] env[61852]: DEBUG nova.compute.utils [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 775.777396] env[61852]: DEBUG nova.compute.manager [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 775.777579] env[61852]: DEBUG nova.network.neutron [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 775.828178] env[61852]: DEBUG oslo_vmware.api [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52b6195a-89a7-5f84-3824-6c13dc765334, 'name': SearchDatastore_Task, 'duration_secs': 0.009257} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.828462] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 775.828718] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] d48cefda-0b05-4ec0-8c1d-bc25cd491faf/d48cefda-0b05-4ec0-8c1d-bc25cd491faf.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 775.828977] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a3838263-79be-4355-9df7-9e73e2017178 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.836079] env[61852]: DEBUG oslo_vmware.api [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Waiting for the task: (returnval){ [ 775.836079] env[61852]: value = "task-1292752" [ 775.836079] env[61852]: _type = "Task" [ 775.836079] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.840427] env[61852]: DEBUG nova.policy [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0f04d129452d4eb79514c52a6972af0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e93a6965a6884292bc56b01f7d54a622', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 775.847416] env[61852]: DEBUG oslo_vmware.api [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Task: {'id': task-1292752, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.000870] env[61852]: INFO nova.scheduler.client.report [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Deleted allocations for instance e795b0f0-2c9c-4f44-9058-fbe706873d5a [ 776.135595] env[61852]: DEBUG nova.network.neutron [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Successfully created port: 9e5204e6-6870-43d3-986f-9ca080104e14 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 776.278991] env[61852]: DEBUG nova.compute.manager [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 776.283818] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2ad9548c-ca86-40e0-bfcf-d9d95f363acc tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 776.288371] env[61852]: DEBUG oslo_concurrency.lockutils [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.031s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 776.290536] env[61852]: INFO nova.compute.claims [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 776.350221] env[61852]: DEBUG oslo_vmware.api [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Task: {'id': task-1292752, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.515895] env[61852]: DEBUG oslo_concurrency.lockutils [None req-de8e3dad-6082-4a03-addd-def0e2f9ada9 tempest-ServerActionsV293TestJSON-206449306 tempest-ServerActionsV293TestJSON-206449306-project-member] Lock "e795b0f0-2c9c-4f44-9058-fbe706873d5a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 122.444s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 776.847009] env[61852]: DEBUG oslo_vmware.api [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Task: {'id': task-1292752, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.524462} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.847299] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] d48cefda-0b05-4ec0-8c1d-bc25cd491faf/d48cefda-0b05-4ec0-8c1d-bc25cd491faf.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 776.847507] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 776.847820] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6386bc03-41c0-490c-8819-f33ae47ca34b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.854284] env[61852]: DEBUG oslo_vmware.api [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Waiting for the task: (returnval){ [ 776.854284] env[61852]: value = "task-1292754" [ 776.854284] env[61852]: _type = "Task" [ 776.854284] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.862169] env[61852]: DEBUG oslo_vmware.api [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Task: {'id': task-1292754, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.017115] env[61852]: DEBUG nova.compute.manager [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 777.293731] env[61852]: DEBUG nova.compute.manager [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 777.317450] env[61852]: DEBUG nova.virt.hardware [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 777.317695] env[61852]: DEBUG nova.virt.hardware [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 777.317851] env[61852]: DEBUG nova.virt.hardware [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 777.318042] env[61852]: DEBUG nova.virt.hardware [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 777.318192] env[61852]: DEBUG nova.virt.hardware [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 777.318336] env[61852]: DEBUG nova.virt.hardware [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 777.318538] env[61852]: DEBUG nova.virt.hardware [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 777.318697] env[61852]: DEBUG nova.virt.hardware [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 777.318860] env[61852]: DEBUG 
nova.virt.hardware [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 777.319059] env[61852]: DEBUG nova.virt.hardware [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 777.319444] env[61852]: DEBUG nova.virt.hardware [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 777.320307] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f8e531b-a639-45b9-874a-6a127572a041 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.331071] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7d0b930-0d10-4c1a-8092-f3c699c6f0c1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.363293] env[61852]: DEBUG oslo_vmware.api [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Task: {'id': task-1292754, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070746} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.363582] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 777.366908] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0592ee6-ba0e-42bd-91a7-11f1e1122b8e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.387449] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] d48cefda-0b05-4ec0-8c1d-bc25cd491faf/d48cefda-0b05-4ec0-8c1d-bc25cd491faf.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 777.389783] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6bd99771-7640-4c05-a5dd-f25ea6fa5e43 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.407963] env[61852]: DEBUG oslo_vmware.api [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Waiting for the task: (returnval){ [ 777.407963] env[61852]: value = "task-1292755" [ 777.407963] env[61852]: _type = "Task" [ 777.407963] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.417353] env[61852]: DEBUG oslo_vmware.api [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Task: {'id': task-1292755, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.535907] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 777.648702] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfad5c54-3f2d-432f-a965-dc187510c8af {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.656300] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf6c2cbb-3200-420f-bb37-022dbfc0a6f0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.686036] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070f18a8-cb83-4b22-a1fd-fe56a4193047 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.693726] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31200eee-a483-4319-a3ee-9a935a4ba693 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.710589] env[61852]: DEBUG nova.compute.provider_tree [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 777.921944] env[61852]: DEBUG oslo_vmware.api [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Task: {'id': task-1292755, 'name': ReconfigVM_Task, 'duration_secs': 0.294538} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.921944] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Reconfigured VM instance instance-00000034 to attach disk [datastore1] d48cefda-0b05-4ec0-8c1d-bc25cd491faf/d48cefda-0b05-4ec0-8c1d-bc25cd491faf.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 777.923074] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-60ed45d8-fe66-43fb-85d0-b1456c967da9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.932019] env[61852]: DEBUG oslo_vmware.api [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Waiting for the task: (returnval){ [ 777.932019] env[61852]: value = "task-1292756" [ 777.932019] env[61852]: _type = "Task" [ 777.932019] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.938647] env[61852]: DEBUG oslo_vmware.api [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Task: {'id': task-1292756, 'name': Rename_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.012044] env[61852]: DEBUG nova.compute.manager [req-a5d6d3d6-4521-47e5-ae2b-d2931acea235 req-2b4c4d1e-65ed-44b9-a3a1-fe79f122d949 service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Received event network-vif-plugged-9e5204e6-6870-43d3-986f-9ca080104e14 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 778.012044] env[61852]: DEBUG oslo_concurrency.lockutils [req-a5d6d3d6-4521-47e5-ae2b-d2931acea235 req-2b4c4d1e-65ed-44b9-a3a1-fe79f122d949 service nova] Acquiring lock "d3922357-383f-4f7e-9c76-4eb688a092b9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 778.012044] env[61852]: DEBUG oslo_concurrency.lockutils [req-a5d6d3d6-4521-47e5-ae2b-d2931acea235 req-2b4c4d1e-65ed-44b9-a3a1-fe79f122d949 service nova] Lock "d3922357-383f-4f7e-9c76-4eb688a092b9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 778.012044] env[61852]: DEBUG oslo_concurrency.lockutils [req-a5d6d3d6-4521-47e5-ae2b-d2931acea235 req-2b4c4d1e-65ed-44b9-a3a1-fe79f122d949 service nova] Lock "d3922357-383f-4f7e-9c76-4eb688a092b9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 778.012044] env[61852]: DEBUG nova.compute.manager [req-a5d6d3d6-4521-47e5-ae2b-d2931acea235 req-2b4c4d1e-65ed-44b9-a3a1-fe79f122d949 service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] No waiting events found dispatching network-vif-plugged-9e5204e6-6870-43d3-986f-9ca080104e14 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 778.012425] env[61852]: WARNING nova.compute.manager [req-a5d6d3d6-4521-47e5-ae2b-d2931acea235 req-2b4c4d1e-65ed-44b9-a3a1-fe79f122d949 service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Received unexpected event network-vif-plugged-9e5204e6-6870-43d3-986f-9ca080104e14 for instance with vm_state building and task_state spawning. 
[ 778.208682] env[61852]: DEBUG nova.network.neutron [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Successfully updated port: 9e5204e6-6870-43d3-986f-9ca080104e14 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 778.215726] env[61852]: DEBUG nova.scheduler.client.report [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 778.439334] env[61852]: DEBUG oslo_vmware.api [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Task: {'id': task-1292756, 'name': Rename_Task, 'duration_secs': 0.132759} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.440504] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 778.440504] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-61c848a2-54f2-43ca-86bb-928ac4942927 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.446640] env[61852]: DEBUG oslo_vmware.api [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Waiting for the task: (returnval){ [ 778.446640] env[61852]: value = "task-1292757" [ 778.446640] env[61852]: _type = "Task" [ 778.446640] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.453868] env[61852]: DEBUG oslo_vmware.api [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Task: {'id': task-1292757, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.714099] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "refresh_cache-d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 778.714099] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquired lock "refresh_cache-d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.714099] env[61852]: DEBUG nova.network.neutron [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 778.721616] env[61852]: DEBUG oslo_concurrency.lockutils [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.437s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 778.721694] env[61852]: DEBUG nova.compute.manager [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 778.724596] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.808s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 778.724861] env[61852]: DEBUG nova.objects.instance [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Lazy-loading 'resources' on Instance uuid be44214d-72dc-4517-a91a-7f659b5aa897 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 778.956734] env[61852]: DEBUG oslo_vmware.api [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Task: {'id': task-1292757, 'name': PowerOnVM_Task, 'duration_secs': 0.442065} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.956734] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 778.956734] env[61852]: INFO nova.compute.manager [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Took 8.22 seconds to spawn the instance on the hypervisor. [ 778.957115] env[61852]: DEBUG nova.compute.manager [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 778.957503] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f7552f7-eadb-4a01-9d9e-c5d1e0cc2426 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.227274] env[61852]: DEBUG nova.compute.utils [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 779.228752] env[61852]: DEBUG nova.compute.manager [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 779.228850] env[61852]: DEBUG nova.network.neutron [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 779.251950] env[61852]: DEBUG nova.network.neutron [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 779.303114] env[61852]: DEBUG nova.policy [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'df0e8bd711f44d1aa3cf45a0bb3d527b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e348864f10e64e0e96b5cb2d21a0e95e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 779.404199] env[61852]: DEBUG nova.network.neutron [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Updating instance_info_cache with network_info: [{"id": "9e5204e6-6870-43d3-986f-9ca080104e14", "address": "fa:16:3e:9f:59:1b", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e5204e6-68", "ovs_interfaceid": "9e5204e6-6870-43d3-986f-9ca080104e14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.477654] env[61852]: INFO nova.compute.manager [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Took 41.40 seconds to build instance. 
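Several entries in this stretch (task-1292755 through task-1292757) follow the same shape: a vCenter task is created, "Waiting for the task" is logged, progress is polled ("progress is 0%"), and the entry closes with "completed successfully" plus a duration_secs. A hedged sketch of that poll loop follows; fetch_task_info stands in as an assumed helper rather than oslo.vmware's real internals, which drive the polling from wait_for_task/_poll_task (the api.py call sites named in the log):

```python
# Sketch of the observable wait_for_task behavior: poll the task's info
# until it reaches 'success' or 'error', logging progress in between.
import time

POLL_INTERVAL = 0.5  # seconds; an assumption for the sketch

def wait_for_task(fetch_task_info, task_ref):
    """fetch_task_info is an assumed helper returning a TaskInfo-like
    object with .key, .name, .state, .progress, .result, .error."""
    while True:
        info = fetch_task_info(task_ref)
        if info.state == 'success':
            return info.result
        if info.state == 'error':
            raise RuntimeError(str(info.error))
        # 'queued' or 'running': mirrors the "progress is N%" entries above
        print(f"Task: {info.key} ({info.name}) progress is "
              f"{info.progress or 0}%.")
        time.sleep(POLL_INTERVAL)
```

The sketch only mirrors the success/error/progress states visible in these entries; the library itself schedules the polls on a looping call and translates vCenter faults into typed exceptions.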
[ 779.604761] env[61852]: DEBUG nova.network.neutron [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Successfully created port: 1f3009be-a3c0-4ce3-b287-2d744cee79c4 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 779.677468] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f7ff7c6-b108-4958-b339-d894829c5520 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.685185] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a079dc39-02bc-409d-b7f6-dccb13c52e50 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.716902] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7a56deb-5833-4d81-9b62-467cf1bbbcb8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.731490] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-191a1b06-a482-428c-b379-05534a76a915 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.735454] env[61852]: DEBUG nova.compute.manager [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 779.751117] env[61852]: DEBUG nova.compute.provider_tree [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 779.906286] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Releasing lock "refresh_cache-d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 779.906621] env[61852]: DEBUG nova.compute.manager [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Instance network_info: |[{"id": "9e5204e6-6870-43d3-986f-9ca080104e14", "address": "fa:16:3e:9f:59:1b", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e5204e6-68", "ovs_interfaceid": "9e5204e6-6870-43d3-986f-9ca080104e14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 779.907607] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9f:59:1b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e5204e6-6870-43d3-986f-9ca080104e14', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 779.915585] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Creating folder: Project (e93a6965a6884292bc56b01f7d54a622). Parent ref: group-v277280. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 779.915585] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-15f12663-1dca-4b35-86aa-1d456547c1eb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.928016] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Created folder: Project (e93a6965a6884292bc56b01f7d54a622) in parent group-v277280. [ 779.928016] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Creating folder: Instances. Parent ref: group-v277304. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 779.928016] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-525af3ae-837d-42f7-a24a-077759875d27 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.934433] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Created folder: Instances in parent group-v277304. [ 779.934661] env[61852]: DEBUG oslo.service.loopingcall [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 779.934849] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 779.935056] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-90d3b7e8-4ed0-4ab3-a383-3ce83a252156 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.953853] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 779.953853] env[61852]: value = "task-1292760" [ 779.953853] env[61852]: _type = "Task" [ 779.953853] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.960945] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292760, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.980889] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe6f76ac-122b-48c4-bb95-35d29c74803e tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Lock "d48cefda-0b05-4ec0-8c1d-bc25cd491faf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 112.060s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.036521] env[61852]: DEBUG nova.compute.manager [req-d42af8fd-9c0f-47ae-ad95-b17f59a8a403 req-e460365f-4f63-4e41-a522-3ac0c1a5d779 service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Received event network-changed-9e5204e6-6870-43d3-986f-9ca080104e14 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 780.036673] env[61852]: DEBUG nova.compute.manager [req-d42af8fd-9c0f-47ae-ad95-b17f59a8a403 req-e460365f-4f63-4e41-a522-3ac0c1a5d779 service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Refreshing instance network info cache due to event network-changed-9e5204e6-6870-43d3-986f-9ca080104e14. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 780.036915] env[61852]: DEBUG oslo_concurrency.lockutils [req-d42af8fd-9c0f-47ae-ad95-b17f59a8a403 req-e460365f-4f63-4e41-a522-3ac0c1a5d779 service nova] Acquiring lock "refresh_cache-d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 780.037017] env[61852]: DEBUG oslo_concurrency.lockutils [req-d42af8fd-9c0f-47ae-ad95-b17f59a8a403 req-e460365f-4f63-4e41-a522-3ac0c1a5d779 service nova] Acquired lock "refresh_cache-d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.037181] env[61852]: DEBUG nova.network.neutron [req-d42af8fd-9c0f-47ae-ad95-b17f59a8a403 req-e460365f-4f63-4e41-a522-3ac0c1a5d779 service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Refreshing network info cache for port 9e5204e6-6870-43d3-986f-9ca080104e14 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 780.254340] env[61852]: DEBUG nova.scheduler.client.report [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 780.464400] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292760, 'name': CreateVM_Task, 'duration_secs': 0.291219} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.464400] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 780.464995] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 780.465178] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.465470] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 780.465719] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a5b957b-adf1-49ca-9511-1175e1175542 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.470663] env[61852]: DEBUG oslo_vmware.api [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 780.470663] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]528ca303-40c7-c086-58f5-395ce8aafd0b" [ 780.470663] env[61852]: _type = "Task" [ 780.470663] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.478650] env[61852]: DEBUG oslo_vmware.api [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]528ca303-40c7-c086-58f5-395ce8aafd0b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.483133] env[61852]: DEBUG nova.compute.manager [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 780.726873] env[61852]: DEBUG nova.network.neutron [req-d42af8fd-9c0f-47ae-ad95-b17f59a8a403 req-e460365f-4f63-4e41-a522-3ac0c1a5d779 service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Updated VIF entry in instance network info cache for port 9e5204e6-6870-43d3-986f-9ca080104e14. 
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 780.727304] env[61852]: DEBUG nova.network.neutron [req-d42af8fd-9c0f-47ae-ad95-b17f59a8a403 req-e460365f-4f63-4e41-a522-3ac0c1a5d779 service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Updating instance_info_cache with network_info: [{"id": "9e5204e6-6870-43d3-986f-9ca080104e14", "address": "fa:16:3e:9f:59:1b", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e5204e6-68", "ovs_interfaceid": "9e5204e6-6870-43d3-986f-9ca080104e14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.745173] env[61852]: DEBUG nova.compute.manager [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 780.759035] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.034s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.761056] env[61852]: DEBUG oslo_concurrency.lockutils [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.633s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.762901] env[61852]: INFO nova.compute.claims [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 780.773561] env[61852]: DEBUG nova.virt.hardware [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 780.774113] env[61852]: DEBUG nova.virt.hardware [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 780.774113] env[61852]: DEBUG nova.virt.hardware [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 780.774113] env[61852]: DEBUG nova.virt.hardware [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 780.774293] env[61852]: DEBUG nova.virt.hardware [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 780.774391] 
env[61852]: DEBUG nova.virt.hardware [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 780.774612] env[61852]: DEBUG nova.virt.hardware [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 780.774772] env[61852]: DEBUG nova.virt.hardware [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 780.775065] env[61852]: DEBUG nova.virt.hardware [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 780.775125] env[61852]: DEBUG nova.virt.hardware [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 780.775263] env[61852]: DEBUG nova.virt.hardware [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 780.776499] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9306c5ca-2c96-4f18-8648-fe762c89c0b5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.781300] env[61852]: INFO nova.scheduler.client.report [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Deleted allocations for instance be44214d-72dc-4517-a91a-7f659b5aa897 [ 780.787581] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-087019c5-b969-4cad-a379-6b62fbe39405 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.980729] env[61852]: DEBUG oslo_vmware.api [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]528ca303-40c7-c086-58f5-395ce8aafd0b, 'name': SearchDatastore_Task, 'duration_secs': 0.008946} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.981058] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 780.981269] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 780.981496] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 780.981639] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.981810] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 780.982079] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-12b63c15-42f4-498c-9f45-b05a853e0738 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.990200] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 780.990376] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 780.992967] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c12bb7c3-d08e-4598-a9dc-605c3781fded {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.998364] env[61852]: DEBUG oslo_vmware.api [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 780.998364] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d66878-93ba-5070-f5bc-b7d5bc8433d4" [ 780.998364] env[61852]: _type = "Task" [ 780.998364] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.005220] env[61852]: DEBUG oslo_concurrency.lockutils [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.008736] env[61852]: DEBUG oslo_vmware.api [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d66878-93ba-5070-f5bc-b7d5bc8433d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.121552] env[61852]: DEBUG nova.compute.manager [req-bd7e945d-c341-42ae-a3d5-5d655f552ccb req-46fa69f6-4c27-45df-ba47-930fa502917d service nova] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Received event network-vif-plugged-1f3009be-a3c0-4ce3-b287-2d744cee79c4 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 781.123966] env[61852]: DEBUG oslo_concurrency.lockutils [req-bd7e945d-c341-42ae-a3d5-5d655f552ccb req-46fa69f6-4c27-45df-ba47-930fa502917d service nova] Acquiring lock "cb50d964-5c0e-4cf3-b652-0f7b7a488f91-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.124285] env[61852]: DEBUG oslo_concurrency.lockutils [req-bd7e945d-c341-42ae-a3d5-5d655f552ccb req-46fa69f6-4c27-45df-ba47-930fa502917d service nova] Lock "cb50d964-5c0e-4cf3-b652-0f7b7a488f91-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 781.124691] env[61852]: DEBUG oslo_concurrency.lockutils [req-bd7e945d-c341-42ae-a3d5-5d655f552ccb req-46fa69f6-4c27-45df-ba47-930fa502917d service nova] Lock "cb50d964-5c0e-4cf3-b652-0f7b7a488f91-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 781.124952] env[61852]: DEBUG nova.compute.manager [req-bd7e945d-c341-42ae-a3d5-5d655f552ccb req-46fa69f6-4c27-45df-ba47-930fa502917d service nova] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] No waiting events found 
dispatching network-vif-plugged-1f3009be-a3c0-4ce3-b287-2d744cee79c4 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 781.125212] env[61852]: WARNING nova.compute.manager [req-bd7e945d-c341-42ae-a3d5-5d655f552ccb req-46fa69f6-4c27-45df-ba47-930fa502917d service nova] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Received unexpected event network-vif-plugged-1f3009be-a3c0-4ce3-b287-2d744cee79c4 for instance with vm_state building and task_state spawning. [ 781.229964] env[61852]: DEBUG oslo_concurrency.lockutils [req-d42af8fd-9c0f-47ae-ad95-b17f59a8a403 req-e460365f-4f63-4e41-a522-3ac0c1a5d779 service nova] Releasing lock "refresh_cache-d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 781.295803] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6ada4613-4932-497a-956c-d02e0fdda573 tempest-ServerShowV254Test-1165329400 tempest-ServerShowV254Test-1165329400-project-member] Lock "be44214d-72dc-4517-a91a-7f659b5aa897" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.043s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 781.314145] env[61852]: DEBUG nova.network.neutron [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Successfully updated port: 1f3009be-a3c0-4ce3-b287-2d744cee79c4 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 781.508307] env[61852]: DEBUG oslo_vmware.api [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d66878-93ba-5070-f5bc-b7d5bc8433d4, 'name': SearchDatastore_Task, 'duration_secs': 0.008173} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.510695] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74420f9e-1f54-4d21-926b-4f97644ada9c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.516529] env[61852]: DEBUG oslo_vmware.api [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 781.516529] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52a78721-dae8-e39f-59ac-89a4eefdabb6" [ 781.516529] env[61852]: _type = "Task" [ 781.516529] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.524819] env[61852]: DEBUG oslo_vmware.api [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52a78721-dae8-e39f-59ac-89a4eefdabb6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.791945] env[61852]: DEBUG nova.scheduler.client.report [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Refreshing inventories for resource provider f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 781.807333] env[61852]: DEBUG nova.scheduler.client.report [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Updating ProviderTree inventory for provider f818062c-7b17-4bd0-94af-192a674543c3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 781.807611] env[61852]: DEBUG nova.compute.provider_tree [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 781.817369] env[61852]: DEBUG oslo_concurrency.lockutils [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Acquiring lock "refresh_cache-cb50d964-5c0e-4cf3-b652-0f7b7a488f91" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 781.817525] env[61852]: DEBUG oslo_concurrency.lockutils [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Acquired lock "refresh_cache-cb50d964-5c0e-4cf3-b652-0f7b7a488f91" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.817722] env[61852]: DEBUG nova.network.neutron [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 781.822782] env[61852]: DEBUG nova.scheduler.client.report [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Refreshing aggregate associations for resource provider f818062c-7b17-4bd0-94af-192a674543c3, aggregates: None {{(pid=61852) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:827}} [ 781.841030] env[61852]: DEBUG nova.scheduler.client.report [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Refreshing trait associations for resource provider f818062c-7b17-4bd0-94af-192a674543c3, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 782.029068] env[61852]: DEBUG oslo_vmware.api [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52a78721-dae8-e39f-59ac-89a4eefdabb6, 'name': SearchDatastore_Task, 'duration_secs': 0.008715} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.031207] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 782.031469] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] d3922357-383f-4f7e-9c76-4eb688a092b9/d3922357-383f-4f7e-9c76-4eb688a092b9.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 782.031881] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-009fd346-b51d-4150-9877-802c5ef96e52 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.037866] env[61852]: DEBUG oslo_vmware.api [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 782.037866] env[61852]: value = "task-1292761" [ 782.037866] env[61852]: _type = "Task" [ 782.037866] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.048395] env[61852]: DEBUG oslo_vmware.api [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1292761, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.125469] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dba3d8ff-ad00-421e-907c-b4b8e35b7f74 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.132837] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27c7104a-6561-4b85-9fd0-2b5c60887536 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.164746] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f8a7dcc-8749-44f2-995b-84ba998218d6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.172090] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3de8b744-8dd9-4566-8a7c-d5b52b205fc3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.185159] env[61852]: DEBUG nova.compute.provider_tree [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 782.364953] env[61852]: DEBUG nova.network.neutron [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 782.553038] env[61852]: DEBUG oslo_vmware.api [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1292761, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.681191] env[61852]: DEBUG nova.network.neutron [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Updating instance_info_cache with network_info: [{"id": "1f3009be-a3c0-4ce3-b287-2d744cee79c4", "address": "fa:16:3e:61:3b:8a", "network": {"id": "13d7a9a8-ebc7-41fe-9af8-f399f6ee7257", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-579668367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e348864f10e64e0e96b5cb2d21a0e95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68ec9c06-8680-4a41-abad-cddbd1f768c9", "external-id": "nsx-vlan-transportzone-883", "segmentation_id": 883, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f3009be-a3", "ovs_interfaceid": "1f3009be-a3c0-4ce3-b287-2d744cee79c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.688328] env[61852]: DEBUG nova.scheduler.client.report [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 783.049211] env[61852]: DEBUG oslo_vmware.api [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1292761, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.741478} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.049444] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] d3922357-383f-4f7e-9c76-4eb688a092b9/d3922357-383f-4f7e-9c76-4eb688a092b9.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 783.049606] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 783.049852] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cf780206-7307-4ce6-93bc-e6208389ade8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.056594] env[61852]: DEBUG oslo_vmware.api [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 783.056594] env[61852]: value = "task-1292762" [ 783.056594] env[61852]: _type = "Task" [ 783.056594] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.063946] env[61852]: DEBUG oslo_vmware.api [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1292762, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.146732] env[61852]: DEBUG nova.compute.manager [req-d2bf6ea2-cb8d-4035-9c8c-468a9e795144 req-b62b96ae-4413-4a68-91f0-2e73592cdfd6 service nova] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Received event network-changed-1f3009be-a3c0-4ce3-b287-2d744cee79c4 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 783.147413] env[61852]: DEBUG nova.compute.manager [req-d2bf6ea2-cb8d-4035-9c8c-468a9e795144 req-b62b96ae-4413-4a68-91f0-2e73592cdfd6 service nova] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Refreshing instance network info cache due to event network-changed-1f3009be-a3c0-4ce3-b287-2d744cee79c4. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 783.147413] env[61852]: DEBUG oslo_concurrency.lockutils [req-d2bf6ea2-cb8d-4035-9c8c-468a9e795144 req-b62b96ae-4413-4a68-91f0-2e73592cdfd6 service nova] Acquiring lock "refresh_cache-cb50d964-5c0e-4cf3-b652-0f7b7a488f91" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 783.184479] env[61852]: DEBUG oslo_concurrency.lockutils [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Releasing lock "refresh_cache-cb50d964-5c0e-4cf3-b652-0f7b7a488f91" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 783.184808] env[61852]: DEBUG nova.compute.manager [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Instance network_info: |[{"id": "1f3009be-a3c0-4ce3-b287-2d744cee79c4", "address": "fa:16:3e:61:3b:8a", "network": {"id": "13d7a9a8-ebc7-41fe-9af8-f399f6ee7257", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-579668367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e348864f10e64e0e96b5cb2d21a0e95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68ec9c06-8680-4a41-abad-cddbd1f768c9", "external-id": "nsx-vlan-transportzone-883", "segmentation_id": 883, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f3009be-a3", "ovs_interfaceid": "1f3009be-a3c0-4ce3-b287-2d744cee79c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 783.185112] env[61852]: DEBUG oslo_concurrency.lockutils [req-d2bf6ea2-cb8d-4035-9c8c-468a9e795144 req-b62b96ae-4413-4a68-91f0-2e73592cdfd6 service nova] Acquired lock "refresh_cache-cb50d964-5c0e-4cf3-b652-0f7b7a488f91" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.185287] env[61852]: DEBUG nova.network.neutron [req-d2bf6ea2-cb8d-4035-9c8c-468a9e795144 req-b62b96ae-4413-4a68-91f0-2e73592cdfd6 service nova] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Refreshing network info cache for port 1f3009be-a3c0-4ce3-b287-2d744cee79c4 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 783.186477] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:3b:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '68ec9c06-8680-4a41-abad-cddbd1f768c9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1f3009be-a3c0-4ce3-b287-2d744cee79c4', 'vif_model': 
'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 783.194217] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Creating folder: Project (e348864f10e64e0e96b5cb2d21a0e95e). Parent ref: group-v277280. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 783.195597] env[61852]: DEBUG oslo_concurrency.lockutils [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.435s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.196081] env[61852]: DEBUG nova.compute.manager [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 783.198612] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-26f16b15-05a4-4c79-981e-a1e440194ecf {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.200582] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.541s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 783.213257] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Created folder: Project (e348864f10e64e0e96b5cb2d21a0e95e) in parent group-v277280. [ 783.213343] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Creating folder: Instances. Parent ref: group-v277307. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 783.213586] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6e247cd7-e98e-4f9c-afcf-f3be0c58380c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.223209] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Created folder: Instances in parent group-v277307. [ 783.223591] env[61852]: DEBUG oslo.service.loopingcall [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 783.223677] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 783.223843] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4c521966-2f58-40f5-b157-f042d732f22f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.245183] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 783.245183] env[61852]: value = "task-1292765" [ 783.245183] env[61852]: _type = "Task" [ 783.245183] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.253504] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292765, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.565992] env[61852]: DEBUG oslo_vmware.api [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1292762, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064369} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.566273] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 783.567032] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e03249d-0239-4fb5-a204-b25603ebe31b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.589047] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] d3922357-383f-4f7e-9c76-4eb688a092b9/d3922357-383f-4f7e-9c76-4eb688a092b9.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 783.589295] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-399b5b35-886e-4a70-9939-fcd87d9ae8f3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.607709] env[61852]: DEBUG oslo_vmware.api [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 783.607709] env[61852]: value = "task-1292766" [ 783.607709] env[61852]: _type = "Task" [ 783.607709] env[61852]: } to complete. 
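The wait_for_task / _poll_task pairs that dominate this trace block on a vCenter task object, printing "progress is N%" until the task reports success or error. A minimal sketch of that polling pattern, with a fake task standing in for the real vSphere task that oslo.vmware reads on each poll (names and structure here are illustrative, not the oslo.vmware API):

    import time

    # Illustrative only: the loop behind the wait_for_task / _poll_task entries.
    class _FakeTask:
        """Stand-in for a vCenter task; completes after four polls."""
        def __init__(self):
            self.progress = 0
        def info(self):
            self.progress = min(self.progress + 25, 100)
            state = 'success' if self.progress == 100 else 'running'
            return {'id': 'task-0', 'name': 'DemoTask',
                    'state': state, 'progress': self.progress}

    def wait_for_task(task, poll_interval=0.1):
        while True:
            info = task.info()
            if info['state'] == 'success':
                return info                      # "... completed successfully."
            if info['state'] == 'error':
                raise RuntimeError('task failed')
            # Mirrors the "... progress is N%" entries in the log.
            print(f"Task {info['id']} ({info['name']}) "
                  f"progress is {info['progress']}%")
            time.sleep(poll_interval)

    wait_for_task(_FakeTask())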
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.615546] env[61852]: DEBUG oslo_vmware.api [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1292766, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.701963] env[61852]: DEBUG nova.compute.utils [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 783.703598] env[61852]: DEBUG nova.compute.manager [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 783.703761] env[61852]: DEBUG nova.network.neutron [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 783.752083] env[61852]: DEBUG nova.policy [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cbd20746bcea49d8927d9d66a3c0240b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '544e2807166b4ee58e0672c6d71f6d7e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 783.762251] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292765, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.039451] env[61852]: DEBUG nova.network.neutron [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Successfully created port: 94d9d6be-2b90-477a-a1a2-f04ffbdabc54 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 784.067141] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aab19dae-8078-45c3-9396-716a6e0580ca {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.074348] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dedfed13-9828-450c-b485-20af6a6fe564 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.078646] env[61852]: DEBUG nova.network.neutron [req-d2bf6ea2-cb8d-4035-9c8c-468a9e795144 req-b62b96ae-4413-4a68-91f0-2e73592cdfd6 service nova] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Updated VIF entry in instance network info cache for port 1f3009be-a3c0-4ce3-b287-2d744cee79c4. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 784.079105] env[61852]: DEBUG nova.network.neutron [req-d2bf6ea2-cb8d-4035-9c8c-468a9e795144 req-b62b96ae-4413-4a68-91f0-2e73592cdfd6 service nova] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Updating instance_info_cache with network_info: [{"id": "1f3009be-a3c0-4ce3-b287-2d744cee79c4", "address": "fa:16:3e:61:3b:8a", "network": {"id": "13d7a9a8-ebc7-41fe-9af8-f399f6ee7257", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-579668367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e348864f10e64e0e96b5cb2d21a0e95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68ec9c06-8680-4a41-abad-cddbd1f768c9", "external-id": "nsx-vlan-transportzone-883", "segmentation_id": 883, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f3009be-a3", "ovs_interfaceid": "1f3009be-a3c0-4ce3-b287-2d744cee79c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.118518] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79b1b002-8fca-424e-afc9-09a396286dc1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.127767] env[61852]: DEBUG oslo_vmware.api [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1292766, 'name': ReconfigVM_Task} progress is 14%. 
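The instance_info_cache entries above embed the full VIF model as JSON; extracting the fields a consumer typically needs (MAC, fixed IPs, MTU, NSX segmentation ID) is a plain walk of that structure. A short sketch over an abbreviated copy of the blob logged above (keys not needed for the walk are omitted):

    import json

    # Abbreviated copy of the network_info logged above for port 1f3009be-a3c0.
    network_info = json.loads('''
    [{"id": "1f3009be-a3c0-4ce3-b287-2d744cee79c4",
      "address": "fa:16:3e:61:3b:8a",
      "network": {"id": "13d7a9a8-ebc7-41fe-9af8-f399f6ee7257",
                  "bridge": "br-int",
                  "subnets": [{"cidr": "192.168.128.0/28",
                               "ips": [{"address": "192.168.128.9",
                                        "type": "fixed", "version": 4}]}],
                  "meta": {"mtu": 8950}},
      "details": {"segmentation_id": 883},
      "devname": "tap1f3009be-a3"}]
    ''')

    for vif in network_info:
        fixed_ips = [ip['address']
                     for subnet in vif['network']['subnets']
                     for ip in subnet['ips'] if ip['type'] == 'fixed']
        print(vif['id'], vif['address'], fixed_ips,
              'mtu', vif['network']['meta']['mtu'],
              'segment', vif['details']['segmentation_id'])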
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.133207] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f0fbeb-86ec-4031-8d7b-10b78d9550e7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.147169] env[61852]: DEBUG nova.compute.provider_tree [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 784.207544] env[61852]: DEBUG nova.compute.manager [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 784.255850] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292765, 'name': CreateVM_Task, 'duration_secs': 0.827563} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.256055] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 784.256840] env[61852]: DEBUG oslo_concurrency.lockutils [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 784.257014] env[61852]: DEBUG oslo_concurrency.lockutils [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.257344] env[61852]: DEBUG oslo_concurrency.lockutils [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 784.257582] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-806ed98d-2aff-4664-bcad-edf029aabb5d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.262373] env[61852]: DEBUG oslo_vmware.api [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Waiting for the task: (returnval){ [ 784.262373] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52911b4d-2be2-0eab-0a33-029f5de7ad95" [ 784.262373] env[61852]: _type = "Task" [ 784.262373] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.269881] env[61852]: DEBUG oslo_vmware.api [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52911b4d-2be2-0eab-0a33-029f5de7ad95, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.584611] env[61852]: DEBUG oslo_concurrency.lockutils [req-d2bf6ea2-cb8d-4035-9c8c-468a9e795144 req-b62b96ae-4413-4a68-91f0-2e73592cdfd6 service nova] Releasing lock "refresh_cache-cb50d964-5c0e-4cf3-b652-0f7b7a488f91" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 784.622033] env[61852]: DEBUG oslo_vmware.api [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1292766, 'name': ReconfigVM_Task, 'duration_secs': 1.011874} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.622033] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Reconfigured VM instance instance-00000035 to attach disk [datastore1] d3922357-383f-4f7e-9c76-4eb688a092b9/d3922357-383f-4f7e-9c76-4eb688a092b9.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 784.622033] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bc96c2ff-0cbf-4617-ae23-b850d38479a5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.627492] env[61852]: DEBUG oslo_vmware.api [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 784.627492] env[61852]: value = "task-1292767" [ 784.627492] env[61852]: _type = "Task" [ 784.627492] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.634777] env[61852]: DEBUG oslo_vmware.api [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1292767, 'name': Rename_Task} progress is 0%. 
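The lockutils entries threaded through this trace record how long each caller waited for and then held a named lock ("acquired ... waited 18.541s", "released ... held 2.435s"), which is what makes contention on locks like "compute_resources" visible. A sketch of that bookkeeping using a plain threading.Lock; this illustrates the logged protocol, it is not oslo.concurrency's implementation:

    import threading
    import time
    from contextlib import contextmanager

    # Illustrative only: waited/held accounting as seen in the lockutils lines.
    _locks = {}

    @contextmanager
    def logged_lock(name, owner):
        lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        t1 = time.monotonic()
        print(f'Lock "{name}" acquired by "{owner}" :: waited {t1 - t0:.3f}s')
        try:
            yield
        finally:
            lock.release()
            print(f'Lock "{name}" "released" by "{owner}" :: '
                  f'held {time.monotonic() - t1:.3f}s')

    with logged_lock("compute_resources", "instance_claim"):
        time.sleep(0.01)   # critical section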
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.649922] env[61852]: DEBUG nova.scheduler.client.report [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 784.772256] env[61852]: DEBUG oslo_vmware.api [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52911b4d-2be2-0eab-0a33-029f5de7ad95, 'name': SearchDatastore_Task, 'duration_secs': 0.007872} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.772581] env[61852]: DEBUG oslo_concurrency.lockutils [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 784.772811] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 784.773106] env[61852]: DEBUG oslo_concurrency.lockutils [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 784.773263] env[61852]: DEBUG oslo_concurrency.lockutils [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.773443] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 784.773684] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-af749859-5cd0-4ba1-bacc-9a83afb3be2f {{(pid=61852) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.780926] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 784.781074] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 784.781770] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbbed0e5-03c7-4f71-b424-1800c77d2880 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.786531] env[61852]: DEBUG oslo_vmware.api [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Waiting for the task: (returnval){ [ 784.786531] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52ffeb7a-7f9c-e4c9-6b95-f69c95bf0f9e" [ 784.786531] env[61852]: _type = "Task" [ 784.786531] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.793569] env[61852]: DEBUG oslo_vmware.api [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52ffeb7a-7f9c-e4c9-6b95-f69c95bf0f9e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.138254] env[61852]: DEBUG oslo_vmware.api [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1292767, 'name': Rename_Task, 'duration_secs': 0.127834} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.138527] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 785.138761] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f49aaa69-5918-4e2a-9046-f152c2fb5d86 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.144909] env[61852]: DEBUG oslo_vmware.api [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 785.144909] env[61852]: value = "task-1292768" [ 785.144909] env[61852]: _type = "Task" [ 785.144909] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.152271] env[61852]: DEBUG oslo_vmware.api [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1292768, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.155074] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.954s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 785.155659] env[61852]: ERROR nova.compute.manager [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 12409f55-90b1-4ec0-9fae-fd4c1612154c, please check neutron logs for more information. [ 785.155659] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Traceback (most recent call last): [ 785.155659] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 785.155659] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] self.driver.spawn(context, instance, image_meta, [ 785.155659] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 785.155659] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] self._vmops.spawn(context, instance, image_meta, injected_files, [ 785.155659] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 785.155659] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] vm_ref = self.build_virtual_machine(instance, [ 785.155659] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 785.155659] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] vif_infos = vmwarevif.get_vif_info(self._session, [ 785.155659] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 785.156009] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] for vif in network_info: [ 785.156009] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 785.156009] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] return self._sync_wrapper(fn, *args, **kwargs) [ 785.156009] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] 
File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 785.156009] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] self.wait() [ 785.156009] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 785.156009] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] self[:] = self._gt.wait() [ 785.156009] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 785.156009] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] return self._exit_event.wait() [ 785.156009] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 785.156009] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] current.throw(*self._exc) [ 785.156009] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 785.156009] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] result = function(*args, **kwargs) [ 785.156388] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 785.156388] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] return func(*args, **kwargs) [ 785.156388] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 785.156388] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] raise e [ 785.156388] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 785.156388] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] nwinfo = self.network_api.allocate_for_instance( [ 785.156388] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 785.156388] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] created_port_ids = self._update_ports_for_instance( [ 785.156388] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 785.156388] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] with excutils.save_and_reraise_exception(): [ 785.156388] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 785.156388] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] self.force_reraise() [ 785.156388] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 785.156716] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] raise self.value [ 785.156716] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 785.156716] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] updated_port = self._update_port( [ 785.156716] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 785.156716] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] _ensure_no_port_binding_failure(port) [ 785.156716] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 785.156716] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] raise exception.PortBindingFailed(port_id=port['id']) [ 785.156716] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] nova.exception.PortBindingFailed: Binding failed for port 12409f55-90b1-4ec0-9fae-fd4c1612154c, please check neutron logs for more information. [ 785.156716] env[61852]: ERROR nova.compute.manager [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] [ 785.156716] env[61852]: DEBUG nova.compute.utils [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Binding failed for port 12409f55-90b1-4ec0-9fae-fd4c1612154c, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 785.157708] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.671s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 785.158996] env[61852]: INFO nova.compute.claims [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 785.161566] env[61852]: DEBUG nova.compute.manager [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Build of instance d6a46605-aa45-4de3-80a8-cb73b9980669 was re-scheduled: Binding failed for port 12409f55-90b1-4ec0-9fae-fd4c1612154c, please check neutron logs for more information. 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 785.162054] env[61852]: DEBUG nova.compute.manager [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 785.162240] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Acquiring lock "refresh_cache-d6a46605-aa45-4de3-80a8-cb73b9980669" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 785.162387] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Acquired lock "refresh_cache-d6a46605-aa45-4de3-80a8-cb73b9980669" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.162545] env[61852]: DEBUG nova.network.neutron [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 785.217194] env[61852]: DEBUG nova.compute.manager [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 785.247023] env[61852]: DEBUG nova.virt.hardware [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 785.247219] env[61852]: DEBUG nova.virt.hardware [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 785.247219] env[61852]: DEBUG nova.virt.hardware [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 785.247504] env[61852]: DEBUG nova.virt.hardware [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 785.247689] env[61852]: DEBUG nova.virt.hardware [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 785.247875] env[61852]: DEBUG nova.virt.hardware [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 785.248169] env[61852]: DEBUG nova.virt.hardware [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 785.248393] env[61852]: DEBUG nova.virt.hardware [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 785.248575] env[61852]: DEBUG nova.virt.hardware [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 785.248752] env[61852]: DEBUG nova.virt.hardware [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 785.248937] env[61852]: DEBUG nova.virt.hardware [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 785.249847] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c8e4e5-191a-497b-9426-cbd0c60f65e5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.258449] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e0a23b8-b400-4ba6-b571-49359f706050 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.295995] env[61852]: DEBUG oslo_vmware.api [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52ffeb7a-7f9c-e4c9-6b95-f69c95bf0f9e, 'name': SearchDatastore_Task, 'duration_secs': 0.007564} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.296782] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f17a9de5-e83b-47a6-9d71-50b014f3fa94 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.302068] env[61852]: DEBUG oslo_vmware.api [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Waiting for the task: (returnval){ [ 785.302068] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]524ba6f5-c744-91b8-0af8-2a58f3f10125" [ 785.302068] env[61852]: _type = "Task" [ 785.302068] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.309857] env[61852]: DEBUG oslo_vmware.api [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]524ba6f5-c744-91b8-0af8-2a58f3f10125, 'name': SearchDatastore_Task} progress is 0%. 
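The hardware.py entries above enumerate guest CPU topologies for the m1.nano flavor: with 1 vCPU and no flavor or image limits (the 65536 maxima are the defaults), exactly one topology survives, VirtCPUTopology(cores=1,sockets=1,threads=1). An illustrative version of that search, enumerating (sockets, cores, threads) triples whose product equals the vCPU count; Nova's real algorithm orders and filters candidates with more nuance:

    from itertools import product

    # Illustrative only: topology enumeration as reflected in the log above.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        divisors = [d for d in range(1, vcpus + 1) if vcpus % d == 0]
        return [(s, c, t)
                for s, c, t in product(divisors, repeat=3)
                if s * c * t == vcpus
                and s <= max_sockets and c <= max_cores and t <= max_threads]

    # A 1-vCPU flavor such as m1.nano admits exactly one topology: (1, 1, 1).
    print(possible_topologies(1))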
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.435713] env[61852]: DEBUG nova.compute.manager [req-7dd38726-ec52-4a54-82db-d90f23ca7e10 req-59d77e5e-ea5c-4649-b7b0-f06e91ffd010 service nova] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Received event network-vif-plugged-94d9d6be-2b90-477a-a1a2-f04ffbdabc54 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 785.435935] env[61852]: DEBUG oslo_concurrency.lockutils [req-7dd38726-ec52-4a54-82db-d90f23ca7e10 req-59d77e5e-ea5c-4649-b7b0-f06e91ffd010 service nova] Acquiring lock "46ccab1f-b7af-49df-a38d-af1fa3bac486-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.436170] env[61852]: DEBUG oslo_concurrency.lockutils [req-7dd38726-ec52-4a54-82db-d90f23ca7e10 req-59d77e5e-ea5c-4649-b7b0-f06e91ffd010 service nova] Lock "46ccab1f-b7af-49df-a38d-af1fa3bac486-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 785.436336] env[61852]: DEBUG oslo_concurrency.lockutils [req-7dd38726-ec52-4a54-82db-d90f23ca7e10 req-59d77e5e-ea5c-4649-b7b0-f06e91ffd010 service nova] Lock "46ccab1f-b7af-49df-a38d-af1fa3bac486-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 785.436500] env[61852]: DEBUG nova.compute.manager [req-7dd38726-ec52-4a54-82db-d90f23ca7e10 req-59d77e5e-ea5c-4649-b7b0-f06e91ffd010 service nova] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] No waiting events found dispatching network-vif-plugged-94d9d6be-2b90-477a-a1a2-f04ffbdabc54 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 785.436660] env[61852]: WARNING nova.compute.manager [req-7dd38726-ec52-4a54-82db-d90f23ca7e10 req-59d77e5e-ea5c-4649-b7b0-f06e91ffd010 service nova] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Received unexpected event network-vif-plugged-94d9d6be-2b90-477a-a1a2-f04ffbdabc54 for instance with vm_state building and task_state spawning. [ 785.527635] env[61852]: DEBUG nova.network.neutron [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Successfully updated port: 94d9d6be-2b90-477a-a1a2-f04ffbdabc54 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 785.655562] env[61852]: DEBUG oslo_vmware.api [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1292768, 'name': PowerOnVM_Task, 'duration_secs': 0.451808} completed successfully. 
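The "network-vif-plugged" handling above shows the external-event protocol: the compute manager takes the per-instance "<uuid>-events" lock, pops any registered waiter for the event, and when none exists (as here, while the instance is still building) logs the event as unexpected. A rough sketch of that prepare/pop shape, assuming a simple per-instance registry; this is an illustration of the logged protocol, not Nova's InstanceEvents implementation:

    import threading
    from collections import defaultdict

    # Illustrative only: register-then-pop semantics behind the "-events" lock.
    class InstanceEvents:
        def __init__(self):
            self._events = defaultdict(dict)   # instance -> {event_name: Event}
            self._lock = threading.Lock()

        def prepare(self, instance, name):
            """Register a waiter before starting the operation."""
            with self._lock:
                ev = threading.Event()
                self._events[instance][name] = ev
                return ev

        def pop_instance_event(self, instance, name):
            with self._lock:                   # the "<uuid>-events" lock
                return self._events[instance].pop(name, None)

    events = InstanceEvents()
    waiter = events.pop_instance_event('46ccab1f', 'network-vif-plugged-94d9')
    if waiter is None:
        # Matches: "No waiting events found ... Received unexpected event ..."
        print('Received unexpected event network-vif-plugged-94d9')
    else:
        waiter.set()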
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.655833] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 785.656048] env[61852]: INFO nova.compute.manager [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Took 8.36 seconds to spawn the instance on the hypervisor. [ 785.656228] env[61852]: DEBUG nova.compute.manager [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 785.656981] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a7fcd0f-eba9-470b-9184-85702f30f770 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.684071] env[61852]: DEBUG nova.network.neutron [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 785.761135] env[61852]: DEBUG nova.network.neutron [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.811880] env[61852]: DEBUG oslo_vmware.api [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]524ba6f5-c744-91b8-0af8-2a58f3f10125, 'name': SearchDatastore_Task, 'duration_secs': 0.008961} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.812159] env[61852]: DEBUG oslo_concurrency.lockutils [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 785.812411] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] cb50d964-5c0e-4cf3-b652-0f7b7a488f91/cb50d964-5c0e-4cf3-b652-0f7b7a488f91.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 785.812684] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-056486ea-4d26-4cdf-9ba6-b0d6c3d391b3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.819061] env[61852]: DEBUG oslo_vmware.api [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Waiting for the task: (returnval){ [ 785.819061] env[61852]: value = "task-1292769" [ 785.819061] env[61852]: _type = "Task" [ 785.819061] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.826461] env[61852]: DEBUG oslo_vmware.api [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Task: {'id': task-1292769, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.030888] env[61852]: DEBUG oslo_concurrency.lockutils [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Acquiring lock "refresh_cache-46ccab1f-b7af-49df-a38d-af1fa3bac486" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.031102] env[61852]: DEBUG oslo_concurrency.lockutils [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Acquired lock "refresh_cache-46ccab1f-b7af-49df-a38d-af1fa3bac486" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.031304] env[61852]: DEBUG nova.network.neutron [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 786.180685] env[61852]: INFO nova.compute.manager [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Took 36.71 seconds to build instance. [ 786.263637] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Releasing lock "refresh_cache-d6a46605-aa45-4de3-80a8-cb73b9980669" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.263921] env[61852]: DEBUG nova.compute.manager [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 786.264116] env[61852]: DEBUG nova.compute.manager [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 786.264288] env[61852]: DEBUG nova.network.neutron [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 786.280258] env[61852]: DEBUG nova.network.neutron [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 786.332795] env[61852]: DEBUG oslo_vmware.api [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Task: {'id': task-1292769, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.508703} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.333141] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] cb50d964-5c0e-4cf3-b652-0f7b7a488f91/cb50d964-5c0e-4cf3-b652-0f7b7a488f91.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 786.333394] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 786.333698] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-041042c2-ba64-4682-85d6-dcbac4a06261 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.340560] env[61852]: DEBUG oslo_vmware.api [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Waiting for the task: (returnval){ [ 786.340560] env[61852]: value = "task-1292770" [ 786.340560] env[61852]: _type = "Task" [ 786.340560] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.349595] env[61852]: DEBUG oslo_vmware.api [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Task: {'id': task-1292770, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.544395] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd8fceeb-e3f2-4c4d-8371-6776056976d4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.553500] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64aa294a-f7a0-4bc2-95b3-8aaa308e5d6a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.585689] env[61852]: DEBUG nova.network.neutron [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 786.588085] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c952beb-26ee-4c33-b23d-b3890810a4e2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.595930] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8bc6c8d-0407-4ea4-b329-a685aa22371b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.609889] env[61852]: DEBUG nova.compute.provider_tree [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 786.683046] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ade5ac5b-8386-4366-856e-625fc7d230db tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "d3922357-383f-4f7e-9c76-4eb688a092b9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 111.899s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.739922] env[61852]: DEBUG nova.network.neutron [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Updating instance_info_cache with network_info: [{"id": "94d9d6be-2b90-477a-a1a2-f04ffbdabc54", "address": "fa:16:3e:f9:bd:bc", "network": {"id": "08104a62-7c1c-40a8-be3f-fc4fe7d609d7", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-148853082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "544e2807166b4ee58e0672c6d71f6d7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74e6f6e0-95e6-4531-99e9-0e78350fb655", "external-id": "nsx-vlan-transportzone-896", "segmentation_id": 896, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94d9d6be-2b", "ovs_interfaceid": "94d9d6be-2b90-477a-a1a2-f04ffbdabc54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.785430] env[61852]: DEBUG nova.network.neutron [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.850086] env[61852]: DEBUG oslo_vmware.api [None 
req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Task: {'id': task-1292770, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066816} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.850385] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 786.851162] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf9e13b6-50b0-435c-a548-48b677c7ec62 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.874730] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] cb50d964-5c0e-4cf3-b652-0f7b7a488f91/cb50d964-5c0e-4cf3-b652-0f7b7a488f91.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 786.874928] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0749f9e8-c9fa-416a-ab2c-537b41d7a802 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.893979] env[61852]: DEBUG oslo_vmware.api [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Waiting for the task: (returnval){ [ 786.893979] env[61852]: value = "task-1292771" [ 786.893979] env[61852]: _type = "Task" [ 786.893979] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.901474] env[61852]: DEBUG oslo_vmware.api [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Task: {'id': task-1292771, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.096117] env[61852]: DEBUG nova.compute.manager [req-7f229228-4a1c-45ac-a801-8c8cd747a13a req-5c6c184d-a57b-4e83-9bf3-c3ec2872e3c2 service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Received event network-changed-9e5204e6-6870-43d3-986f-9ca080104e14 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 787.096117] env[61852]: DEBUG nova.compute.manager [req-7f229228-4a1c-45ac-a801-8c8cd747a13a req-5c6c184d-a57b-4e83-9bf3-c3ec2872e3c2 service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Refreshing instance network info cache due to event network-changed-9e5204e6-6870-43d3-986f-9ca080104e14. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 787.096117] env[61852]: DEBUG oslo_concurrency.lockutils [req-7f229228-4a1c-45ac-a801-8c8cd747a13a req-5c6c184d-a57b-4e83-9bf3-c3ec2872e3c2 service nova] Acquiring lock "refresh_cache-d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.096117] env[61852]: DEBUG oslo_concurrency.lockutils [req-7f229228-4a1c-45ac-a801-8c8cd747a13a req-5c6c184d-a57b-4e83-9bf3-c3ec2872e3c2 service nova] Acquired lock "refresh_cache-d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.096117] env[61852]: DEBUG nova.network.neutron [req-7f229228-4a1c-45ac-a801-8c8cd747a13a req-5c6c184d-a57b-4e83-9bf3-c3ec2872e3c2 service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Refreshing network info cache for port 9e5204e6-6870-43d3-986f-9ca080104e14 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 787.114721] env[61852]: DEBUG nova.scheduler.client.report [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 787.185493] env[61852]: DEBUG nova.compute.manager [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 787.243239] env[61852]: DEBUG oslo_concurrency.lockutils [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Releasing lock "refresh_cache-46ccab1f-b7af-49df-a38d-af1fa3bac486" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 787.243634] env[61852]: DEBUG nova.compute.manager [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Instance network_info: |[{"id": "94d9d6be-2b90-477a-a1a2-f04ffbdabc54", "address": "fa:16:3e:f9:bd:bc", "network": {"id": "08104a62-7c1c-40a8-be3f-fc4fe7d609d7", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-148853082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "544e2807166b4ee58e0672c6d71f6d7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74e6f6e0-95e6-4531-99e9-0e78350fb655", "external-id": "nsx-vlan-transportzone-896", "segmentation_id": 896, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94d9d6be-2b", "ovs_interfaceid": "94d9d6be-2b90-477a-a1a2-f04ffbdabc54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 787.244094] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:bd:bc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '74e6f6e0-95e6-4531-99e9-0e78350fb655', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '94d9d6be-2b90-477a-a1a2-f04ffbdabc54', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 787.252391] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Creating folder: Project (544e2807166b4ee58e0672c6d71f6d7e). Parent ref: group-v277280. 
{{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 787.252689] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f7662066-8ae4-4596-9d76-35dd2c5e003c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.262997] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Created folder: Project (544e2807166b4ee58e0672c6d71f6d7e) in parent group-v277280. [ 787.263195] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Creating folder: Instances. Parent ref: group-v277310. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 787.263445] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f9cf1dbf-474c-4c7e-aadb-cf7b1d9fc208 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.272142] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Created folder: Instances in parent group-v277310. [ 787.272387] env[61852]: DEBUG oslo.service.loopingcall [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 787.272575] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 787.272812] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a189332e-904c-4c97-9c14-a00ec20cdbb1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.289927] env[61852]: INFO nova.compute.manager [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] [instance: d6a46605-aa45-4de3-80a8-cb73b9980669] Took 1.03 seconds to deallocate network for instance. [ 787.297459] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 787.297459] env[61852]: value = "task-1292774" [ 787.297459] env[61852]: _type = "Task" [ 787.297459] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.305224] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292774, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.404460] env[61852]: DEBUG oslo_vmware.api [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Task: {'id': task-1292771, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.479922] env[61852]: DEBUG nova.compute.manager [req-487bf6ff-702f-44e8-ac49-e90b70ad9ea5 req-3f65c4b3-1365-4899-b528-c8cfaabdc56d service nova] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Received event network-changed-94d9d6be-2b90-477a-a1a2-f04ffbdabc54 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 787.480107] env[61852]: DEBUG nova.compute.manager [req-487bf6ff-702f-44e8-ac49-e90b70ad9ea5 req-3f65c4b3-1365-4899-b528-c8cfaabdc56d service nova] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Refreshing instance network info cache due to event network-changed-94d9d6be-2b90-477a-a1a2-f04ffbdabc54. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 787.480320] env[61852]: DEBUG oslo_concurrency.lockutils [req-487bf6ff-702f-44e8-ac49-e90b70ad9ea5 req-3f65c4b3-1365-4899-b528-c8cfaabdc56d service nova] Acquiring lock "refresh_cache-46ccab1f-b7af-49df-a38d-af1fa3bac486" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.480459] env[61852]: DEBUG oslo_concurrency.lockutils [req-487bf6ff-702f-44e8-ac49-e90b70ad9ea5 req-3f65c4b3-1365-4899-b528-c8cfaabdc56d service nova] Acquired lock "refresh_cache-46ccab1f-b7af-49df-a38d-af1fa3bac486" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.480818] env[61852]: DEBUG nova.network.neutron [req-487bf6ff-702f-44e8-ac49-e90b70ad9ea5 req-3f65c4b3-1365-4899-b528-c8cfaabdc56d service nova] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Refreshing network info cache for port 94d9d6be-2b90-477a-a1a2-f04ffbdabc54 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 787.619639] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.462s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 787.620096] env[61852]: DEBUG nova.compute.manager [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 787.622871] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 21.050s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 787.626030] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 787.626030] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61852) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 787.626030] env[61852]: DEBUG oslo_concurrency.lockutils [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.472s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 787.627287] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59d1b53c-8d73-4953-ad42-e85ff33c50ff {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.638280] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22fa52da-5e22-44f7-a831-7f13c38d8f0f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.655042] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22e871c8-cbf3-4260-a22e-6f1479a27bae {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.663539] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e566adb8-af54-4939-908b-5f0772f17730 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.460516] env[61852]: DEBUG nova.compute.utils [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 788.475809] env[61852]: DEBUG nova.compute.manager [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 788.475809] env[61852]: DEBUG nova.network.neutron [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 788.504891] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181432MB free_disk=139GB free_vcpus=48 pci_devices=None {{(pid=61852) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 788.505061] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 788.508654] env[61852]: DEBUG oslo_concurrency.lockutils [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 788.516340] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292774, 'name': CreateVM_Task, 'duration_secs': 0.483484} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.516962] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 788.519762] env[61852]: DEBUG oslo_concurrency.lockutils [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 788.519762] env[61852]: DEBUG oslo_concurrency.lockutils [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.519762] env[61852]: DEBUG oslo_concurrency.lockutils [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 788.521489] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-267c5132-00b7-4ccc-89be-f5bc7066417c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 788.523330] env[61852]: DEBUG oslo_vmware.api [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Task: {'id': task-1292771, 'name': ReconfigVM_Task, 'duration_secs': 1.051937} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.525854] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Reconfigured VM instance instance-00000036 to attach disk [datastore1] cb50d964-5c0e-4cf3-b652-0f7b7a488f91/cb50d964-5c0e-4cf3-b652-0f7b7a488f91.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 788.527423] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f13757d6-1dd6-4165-9f8a-5f377ccb4cd9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.530490] env[61852]: DEBUG oslo_vmware.api [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Waiting for the task: (returnval){ [ 788.530490] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52202c97-de1b-1f73-a4e9-be71a3288999" [ 788.530490] env[61852]: _type = "Task" [ 788.530490] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.537941] env[61852]: DEBUG oslo_vmware.api [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Waiting for the task: (returnval){ [ 788.537941] env[61852]: value = "task-1292775" [ 788.537941] env[61852]: _type = "Task" [ 788.537941] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.547366] env[61852]: DEBUG oslo_vmware.api [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52202c97-de1b-1f73-a4e9-be71a3288999, 'name': SearchDatastore_Task, 'duration_secs': 0.01075} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.548317] env[61852]: DEBUG oslo_concurrency.lockutils [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 788.548543] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 788.548764] env[61852]: DEBUG oslo_concurrency.lockutils [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 788.549390] env[61852]: DEBUG oslo_concurrency.lockutils [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.549390] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 788.549561] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ff589479-25b2-4ff7-b8c2-9555c0b425b7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.554972] env[61852]: DEBUG oslo_vmware.api [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Task: {'id': task-1292775, 'name': Rename_Task} progress is 10%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.561859] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 788.562063] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 788.562891] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b166a5e-80ff-4162-9ef0-267da4242b93 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.571038] env[61852]: DEBUG oslo_vmware.api [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Waiting for the task: (returnval){ [ 788.571038] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]524aba0f-5d00-84c9-36f3-4d29d825a47b" [ 788.571038] env[61852]: _type = "Task" [ 788.571038] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.584827] env[61852]: DEBUG oslo_vmware.api [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]524aba0f-5d00-84c9-36f3-4d29d825a47b, 'name': SearchDatastore_Task, 'duration_secs': 0.009577} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.592025] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62a89cc2-4f7c-4415-8e71-9bc807c28c8d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.595640] env[61852]: DEBUG oslo_vmware.api [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Waiting for the task: (returnval){ [ 788.595640] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5253b5d9-2753-ea73-2b7b-05fa96cbc13f" [ 788.595640] env[61852]: _type = "Task" [ 788.595640] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.600587] env[61852]: DEBUG nova.policy [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5db98c1126cc41b5930b2e5fa823c330', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '783bc6968c91488293479f10b8dc92c1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 788.606988] env[61852]: DEBUG oslo_vmware.api [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5253b5d9-2753-ea73-2b7b-05fa96cbc13f, 'name': SearchDatastore_Task, 'duration_secs': 0.009252} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.609541] env[61852]: DEBUG oslo_concurrency.lockutils [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 788.609678] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] 46ccab1f-b7af-49df-a38d-af1fa3bac486/46ccab1f-b7af-49df-a38d-af1fa3bac486.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 788.610176] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-72b544d7-03f2-4ac1-b9ee-f41c8a851db8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.616772] env[61852]: DEBUG oslo_vmware.api [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Waiting for the task: (returnval){ [ 788.616772] env[61852]: value = "task-1292776" [ 788.616772] env[61852]: _type = "Task" [ 788.616772] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.630736] env[61852]: DEBUG oslo_vmware.api [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Task: {'id': task-1292776, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.780730] env[61852]: DEBUG nova.network.neutron [req-7f229228-4a1c-45ac-a801-8c8cd747a13a req-5c6c184d-a57b-4e83-9bf3-c3ec2872e3c2 service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Updated VIF entry in instance network info cache for port 9e5204e6-6870-43d3-986f-9ca080104e14. 
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 788.781142] env[61852]: DEBUG nova.network.neutron [req-7f229228-4a1c-45ac-a801-8c8cd747a13a req-5c6c184d-a57b-4e83-9bf3-c3ec2872e3c2 service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Updating instance_info_cache with network_info: [{"id": "9e5204e6-6870-43d3-986f-9ca080104e14", "address": "fa:16:3e:9f:59:1b", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e5204e6-68", "ovs_interfaceid": "9e5204e6-6870-43d3-986f-9ca080104e14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.841942] env[61852]: DEBUG nova.network.neutron [req-487bf6ff-702f-44e8-ac49-e90b70ad9ea5 req-3f65c4b3-1365-4899-b528-c8cfaabdc56d service nova] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Updated VIF entry in instance network info cache for port 94d9d6be-2b90-477a-a1a2-f04ffbdabc54. 
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 788.841942] env[61852]: DEBUG nova.network.neutron [req-487bf6ff-702f-44e8-ac49-e90b70ad9ea5 req-3f65c4b3-1365-4899-b528-c8cfaabdc56d service nova] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Updating instance_info_cache with network_info: [{"id": "94d9d6be-2b90-477a-a1a2-f04ffbdabc54", "address": "fa:16:3e:f9:bd:bc", "network": {"id": "08104a62-7c1c-40a8-be3f-fc4fe7d609d7", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-148853082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "544e2807166b4ee58e0672c6d71f6d7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74e6f6e0-95e6-4531-99e9-0e78350fb655", "external-id": "nsx-vlan-transportzone-896", "segmentation_id": 896, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94d9d6be-2b", "ovs_interfaceid": "94d9d6be-2b90-477a-a1a2-f04ffbdabc54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.867600] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90cf707f-683c-4c0d-9e44-aa4e835c29a9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.877927] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d5d4b5a-5756-4aaa-8b30-6815cafbdd74 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.911180] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87bf64da-162c-465d-a6f2-afa7faafb402 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.919539] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64b588ba-c7a6-4114-b5b6-75b88334544a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.934931] env[61852]: DEBUG nova.compute.provider_tree [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 788.957471] env[61852]: DEBUG nova.network.neutron [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Successfully created port: 241fcec0-c8cc-40b0-bfbc-becb93226dc0 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 788.968306] env[61852]: DEBUG nova.compute.manager [None 
req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 788.988244] env[61852]: INFO nova.scheduler.client.report [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Deleted allocations for instance d6a46605-aa45-4de3-80a8-cb73b9980669 [ 789.050429] env[61852]: DEBUG oslo_vmware.api [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Task: {'id': task-1292775, 'name': Rename_Task, 'duration_secs': 0.249859} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.050762] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 789.051087] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-da5163fa-9734-47d8-ace0-2142d4407713 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.059639] env[61852]: DEBUG oslo_vmware.api [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Waiting for the task: (returnval){ [ 789.059639] env[61852]: value = "task-1292777" [ 789.059639] env[61852]: _type = "Task" [ 789.059639] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.073074] env[61852]: DEBUG oslo_vmware.api [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Task: {'id': task-1292777, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.130045] env[61852]: DEBUG oslo_vmware.api [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Task: {'id': task-1292776, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501369} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.130356] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] 46ccab1f-b7af-49df-a38d-af1fa3bac486/46ccab1f-b7af-49df-a38d-af1fa3bac486.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 789.130637] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 789.130906] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1309a45f-f342-4154-b50d-43f482a56909 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.137079] env[61852]: DEBUG oslo_vmware.api [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Waiting for the task: (returnval){ [ 789.137079] env[61852]: value = "task-1292778" [ 789.137079] env[61852]: _type = "Task" [ 789.137079] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.144696] env[61852]: DEBUG oslo_vmware.api [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Task: {'id': task-1292778, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.283640] env[61852]: DEBUG oslo_concurrency.lockutils [req-7f229228-4a1c-45ac-a801-8c8cd747a13a req-5c6c184d-a57b-4e83-9bf3-c3ec2872e3c2 service nova] Releasing lock "refresh_cache-d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 789.341646] env[61852]: DEBUG oslo_concurrency.lockutils [req-487bf6ff-702f-44e8-ac49-e90b70ad9ea5 req-3f65c4b3-1365-4899-b528-c8cfaabdc56d service nova] Releasing lock "refresh_cache-46ccab1f-b7af-49df-a38d-af1fa3bac486" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 789.439057] env[61852]: DEBUG nova.scheduler.client.report [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 789.498404] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1a49bb39-1e59-4a65-aa85-b4835bed3900 tempest-AttachVolumeShelveTestJSON-1652645497 tempest-AttachVolumeShelveTestJSON-1652645497-project-member] Lock "d6a46605-aa45-4de3-80a8-cb73b9980669" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 122.737s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 789.569904] env[61852]: DEBUG oslo_vmware.api [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Task: {'id': task-1292777, 'name': PowerOnVM_Task} progress is 71%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.646670] env[61852]: DEBUG oslo_vmware.api [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Task: {'id': task-1292778, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067294} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.646939] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 789.647693] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c994b8a1-6cf5-4617-b45e-62f616829df7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.670027] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Reconfiguring VM instance instance-00000037 to attach disk [datastore2] 46ccab1f-b7af-49df-a38d-af1fa3bac486/46ccab1f-b7af-49df-a38d-af1fa3bac486.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 789.670027] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-04a77048-da46-4da6-b973-87092dbf0035 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.688169] env[61852]: DEBUG oslo_vmware.api [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Waiting for the task: (returnval){ [ 789.688169] env[61852]: value = "task-1292779" [ 789.688169] env[61852]: _type = "Task" [ 789.688169] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.697514] env[61852]: DEBUG oslo_vmware.api [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Task: {'id': task-1292779, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.944638] env[61852]: DEBUG oslo_concurrency.lockutils [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.321s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 789.946997] env[61852]: ERROR nova.compute.manager [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port eca54373-1f16-4210-8551-85373b0ac57c, please check neutron logs for more information. 
[ 789.946997] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Traceback (most recent call last):
[ 789.946997] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 789.946997] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] self.driver.spawn(context, instance, image_meta,
[ 789.946997] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn
[ 789.946997] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 789.946997] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 789.946997] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] vm_ref = self.build_virtual_machine(instance,
[ 789.946997] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 789.946997] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] vif_infos = vmwarevif.get_vif_info(self._session,
[ 789.946997] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 789.947542] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] for vif in network_info:
[ 789.947542] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 789.947542] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] return self._sync_wrapper(fn, *args, **kwargs)
[ 789.947542] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 789.947542] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] self.wait()
[ 789.947542] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 789.947542] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] self[:] = self._gt.wait()
[ 789.947542] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 789.947542] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] return self._exit_event.wait()
[ 789.947542] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 789.947542] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] result = hub.switch()
[ 789.947542] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 789.947542] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] return self.greenlet.switch()
[ 789.947877] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 789.947877] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] result = function(*args, **kwargs)
[ 789.947877] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 789.947877] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] return func(*args, **kwargs)
[ 789.947877] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 789.947877] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] raise e
[ 789.947877] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 789.947877] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] nwinfo = self.network_api.allocate_for_instance(
[ 789.947877] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 789.947877] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] created_port_ids = self._update_ports_for_instance(
[ 789.947877] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 789.947877] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] with excutils.save_and_reraise_exception():
[ 789.947877] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 789.948209] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] self.force_reraise()
[ 789.948209] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 789.948209] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] raise self.value
[ 789.948209] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 789.948209] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] updated_port = self._update_port(
[ 789.948209] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 789.948209] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] _ensure_no_port_binding_failure(port)
[ 789.948209] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 789.948209] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] raise exception.PortBindingFailed(port_id=port['id'])
[ 789.948209] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] nova.exception.PortBindingFailed: Binding failed for port eca54373-1f16-4210-8551-85373b0ac57c, please check neutron logs for more information.
[ 789.948209] env[61852]: ERROR nova.compute.manager [instance: b0433331-f005-49e0-bd22-bc78f970e3cd]
[ 789.948494] env[61852]: DEBUG nova.compute.utils [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Binding failed for port eca54373-1f16-4210-8551-85373b0ac57c, please check neutron logs for more information. {{(pid=61852) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}}
[ 789.948494] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.913s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 789.949088] env[61852]: INFO nova.compute.claims [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 789.952391] env[61852]: DEBUG nova.compute.manager [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Build of instance b0433331-f005-49e0-bd22-bc78f970e3cd was re-scheduled: Binding failed for port eca54373-1f16-4210-8551-85373b0ac57c, please check neutron logs for more information.
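The traceback above bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294). A minimal standalone sketch of that check, with simplified stand-in names: only the raise and the error text are taken from the log; the 'binding_failed' sentinel is Neutron's binding:vif_type value for a port whose binding failed.

VIF_TYPE_BINDING_FAILED = 'binding_failed'  # Neutron's binding:vif_type sentinel

class PortBindingFailed(Exception):
    # Stand-in for nova.exception.PortBindingFailed; message text matches
    # the one logged above.
    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, please check "
                         f"neutron logs for more information.")

def ensure_no_port_binding_failure(port):
    # Neutron marks a failed binding by setting binding:vif_type to
    # 'binding_failed'; Nova turns that into an exception so the build is
    # aborted and the instance re-scheduled, as in the entries above.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

port = {'id': 'eca54373-1f16-4210-8551-85373b0ac57c',
        'binding:vif_type': 'binding_failed'}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)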
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 789.952884] env[61852]: DEBUG nova.compute.manager [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Unplugging VIFs for instance {{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 789.953138] env[61852]: DEBUG oslo_concurrency.lockutils [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Acquiring lock "refresh_cache-b0433331-f005-49e0-bd22-bc78f970e3cd" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 789.953286] env[61852]: DEBUG oslo_concurrency.lockutils [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Acquired lock "refresh_cache-b0433331-f005-49e0-bd22-bc78f970e3cd" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.953439] env[61852]: DEBUG nova.network.neutron [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 789.978453] env[61852]: DEBUG nova.compute.manager [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 790.000598] env[61852]: DEBUG nova.compute.manager [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 790.012093] env[61852]: DEBUG nova.virt.hardware [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 790.012359] env[61852]: DEBUG nova.virt.hardware [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 790.012551] env[61852]: DEBUG nova.virt.hardware [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 790.012803] env[61852]: DEBUG nova.virt.hardware [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 790.012878] env[61852]: DEBUG nova.virt.hardware [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 790.013325] env[61852]: DEBUG nova.virt.hardware [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 790.013325] env[61852]: DEBUG nova.virt.hardware [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 790.013439] env[61852]: DEBUG nova.virt.hardware [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 790.013589] env[61852]: DEBUG nova.virt.hardware [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Got 1 possible 
topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 790.013915] env[61852]: DEBUG nova.virt.hardware [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 790.013994] env[61852]: DEBUG nova.virt.hardware [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 790.015197] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-457416a4-67c6-4d99-b1a5-e800d01a7171 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.023501] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751932b2-979c-43cc-89dd-91fb37322ecf {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.070783] env[61852]: DEBUG oslo_vmware.api [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Task: {'id': task-1292777, 'name': PowerOnVM_Task} progress is 94%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.200227] env[61852]: DEBUG oslo_vmware.api [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Task: {'id': task-1292779, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.485431] env[61852]: DEBUG nova.network.neutron [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 790.534706] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.572228] env[61852]: DEBUG oslo_vmware.api [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Task: {'id': task-1292777, 'name': PowerOnVM_Task, 'duration_secs': 1.067164} completed successfully. 
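The nova.virt.hardware entries above show the CPU-topology search for the 1-vCPU m1.nano flavor: unset limits (0) fall back to the 65536 maximum, and the only factorisation of 1 vCPU is 1:1:1. A rough sketch of that enumeration, not Nova's actual implementation; VirtCPUTopology here is a plain namedtuple stand-in.

import itertools
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

MAXIMUM = 65536  # the fallback limit shown in the log

def possible_topologies(vcpus, max_sockets=0, max_cores=0, max_threads=0):
    # A limit of 0 means "unset"; fall back to the maximum, matching
    # "limits were sockets=65536, cores=65536, threads=65536" above.
    smax, cmax, tmax = (v or MAXIMUM for v in (max_sockets, max_cores, max_threads))
    found = []
    for s, c, t in itertools.product(range(1, vcpus + 1), repeat=3):
        # Keep only factorisations of the vCPU count that fit the limits.
        if s * c * t == vcpus and s <= smax and c <= cmax and t <= tmax:
            found.append(VirtCPUTopology(s, c, t))
    return found

# One vCPU yields exactly one candidate, mirroring "Possible topologies
# [VirtCPUTopology(cores=1,sockets=1,threads=1)]" above.
print(possible_topologies(1))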
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.572513] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 790.572739] env[61852]: INFO nova.compute.manager [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Took 9.83 seconds to spawn the instance on the hypervisor. [ 790.572926] env[61852]: DEBUG nova.compute.manager [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 790.573707] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7d830b1-83fe-43e8-9cbb-939b753e1b06 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.588155] env[61852]: DEBUG nova.network.neutron [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.699182] env[61852]: DEBUG oslo_vmware.api [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Task: {'id': task-1292779, 'name': ReconfigVM_Task, 'duration_secs': 0.671351} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.699454] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Reconfigured VM instance instance-00000037 to attach disk [datastore2] 46ccab1f-b7af-49df-a38d-af1fa3bac486/46ccab1f-b7af-49df-a38d-af1fa3bac486.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 790.700063] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-473a0b36-48de-4c80-b27d-fedd8209b213 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.707810] env[61852]: DEBUG oslo_vmware.api [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Waiting for the task: (returnval){ [ 790.707810] env[61852]: value = "task-1292780" [ 790.707810] env[61852]: _type = "Task" [ 790.707810] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.717922] env[61852]: DEBUG oslo_vmware.api [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Task: {'id': task-1292780, 'name': Rename_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.953151] env[61852]: DEBUG nova.compute.manager [req-9aed28be-7a38-459d-a9a1-20c3cb762f42 req-7bcef6fd-053e-4e78-9349-7a0d8f981a65 service nova] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Received event network-vif-plugged-241fcec0-c8cc-40b0-bfbc-becb93226dc0 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 790.953350] env[61852]: DEBUG oslo_concurrency.lockutils [req-9aed28be-7a38-459d-a9a1-20c3cb762f42 req-7bcef6fd-053e-4e78-9349-7a0d8f981a65 service nova] Acquiring lock "c94066d5-2e5f-4059-bdc5-385d517f1d84-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.953563] env[61852]: DEBUG oslo_concurrency.lockutils [req-9aed28be-7a38-459d-a9a1-20c3cb762f42 req-7bcef6fd-053e-4e78-9349-7a0d8f981a65 service nova] Lock "c94066d5-2e5f-4059-bdc5-385d517f1d84-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.953761] env[61852]: DEBUG oslo_concurrency.lockutils [req-9aed28be-7a38-459d-a9a1-20c3cb762f42 req-7bcef6fd-053e-4e78-9349-7a0d8f981a65 service nova] Lock "c94066d5-2e5f-4059-bdc5-385d517f1d84-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.953943] env[61852]: DEBUG nova.compute.manager [req-9aed28be-7a38-459d-a9a1-20c3cb762f42 req-7bcef6fd-053e-4e78-9349-7a0d8f981a65 service nova] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] No waiting events found dispatching network-vif-plugged-241fcec0-c8cc-40b0-bfbc-becb93226dc0 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 790.954122] env[61852]: WARNING nova.compute.manager [req-9aed28be-7a38-459d-a9a1-20c3cb762f42 req-7bcef6fd-053e-4e78-9349-7a0d8f981a65 service nova] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Received unexpected event network-vif-plugged-241fcec0-c8cc-40b0-bfbc-becb93226dc0 for instance with vm_state building and task_state spawning. [ 791.091045] env[61852]: DEBUG oslo_concurrency.lockutils [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Releasing lock "refresh_cache-b0433331-f005-49e0-bd22-bc78f970e3cd" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 791.091045] env[61852]: DEBUG nova.compute.manager [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
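The pop_instance_event entries above show the external-event hand-off: Neutron's network-vif-plugged event arrived while no build thread was registered to wait on it, so it is dispatched as unexpected (the WARNING path). A threading-based sketch of that dispatcher shape; the class is illustrative only, not Nova's InstanceEvents.

import threading

class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        # A build thread registers interest, then blocks on the Event.
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def pop_instance_event(self, instance_uuid, event_name):
        with self._lock:
            waiter = self._waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            # Nobody was waiting: same outcome as the WARNING above.
            print(f"No waiting events found dispatching {event_name}")
            print(f"WARNING: received unexpected event {event_name}")
            return
        waiter.set()  # unblocks the registered build thread

events = InstanceEvents()
events.pop_instance_event('c94066d5', 'network-vif-plugged-241fcec0')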
{{(pid=61852) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 791.091045] env[61852]: DEBUG nova.compute.manager [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 791.091045] env[61852]: DEBUG nova.network.neutron [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 791.091802] env[61852]: INFO nova.compute.manager [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Took 31.85 seconds to build instance. [ 791.107336] env[61852]: DEBUG nova.network.neutron [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 791.217213] env[61852]: DEBUG oslo_vmware.api [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Task: {'id': task-1292780, 'name': Rename_Task, 'duration_secs': 0.168413} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.217479] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 791.217713] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b0b7e998-01cb-48b8-b09f-2930f794550e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.223707] env[61852]: DEBUG oslo_vmware.api [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Waiting for the task: (returnval){ [ 791.223707] env[61852]: value = "task-1292781" [ 791.223707] env[61852]: _type = "Task" [ 791.223707] env[61852]: } to complete. 
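The wait_for_task/_poll_task pairs above (task-1292779 through task-1292781) all follow the same shape: submit a vCenter task, then poll it until it reaches a terminal state, logging "progress is N%" along the way. A hedged sketch of that loop, with a fake poll function standing in for the vCenter API.

import time

def wait_for_task(poll_fn, interval=0.5):
    # Poll until the task reaches a terminal state, logging progress the
    # way _poll_task does in the entries above.
    while True:
        state, progress = poll_fn()
        print(f"progress is {progress}%.")
        if state == 'success':
            return
        if state == 'error':
            raise RuntimeError('task failed')
        time.sleep(interval)

# A stand-in task that advances 6% -> 14% -> done, echoing the
# task-1292779 ReconfigVM_Task entries above.
states = iter([('running', 6), ('running', 14), ('success', 100)])
wait_for_task(lambda: next(states), interval=0)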
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.232316] env[61852]: DEBUG nova.network.neutron [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Successfully updated port: 241fcec0-c8cc-40b0-bfbc-becb93226dc0 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 791.235205] env[61852]: DEBUG oslo_vmware.api [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Task: {'id': task-1292781, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.297816] env[61852]: DEBUG nova.compute.manager [req-166a3441-9e2f-4036-b648-7fd98cc39d6f req-c4d56271-d2a9-4c06-8dbc-b876f8cc0dbf service nova] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Received event network-changed-241fcec0-c8cc-40b0-bfbc-becb93226dc0 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 791.298022] env[61852]: DEBUG nova.compute.manager [req-166a3441-9e2f-4036-b648-7fd98cc39d6f req-c4d56271-d2a9-4c06-8dbc-b876f8cc0dbf service nova] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Refreshing instance network info cache due to event network-changed-241fcec0-c8cc-40b0-bfbc-becb93226dc0. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 791.298239] env[61852]: DEBUG oslo_concurrency.lockutils [req-166a3441-9e2f-4036-b648-7fd98cc39d6f req-c4d56271-d2a9-4c06-8dbc-b876f8cc0dbf service nova] Acquiring lock "refresh_cache-c94066d5-2e5f-4059-bdc5-385d517f1d84" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 791.298382] env[61852]: DEBUG oslo_concurrency.lockutils [req-166a3441-9e2f-4036-b648-7fd98cc39d6f req-c4d56271-d2a9-4c06-8dbc-b876f8cc0dbf service nova] Acquired lock "refresh_cache-c94066d5-2e5f-4059-bdc5-385d517f1d84" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.298956] env[61852]: DEBUG nova.network.neutron [req-166a3441-9e2f-4036-b648-7fd98cc39d6f req-c4d56271-d2a9-4c06-8dbc-b876f8cc0dbf service nova] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Refreshing network info cache for port 241fcec0-c8cc-40b0-bfbc-becb93226dc0 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 791.354109] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4d55c14-7e85-4bb0-9317-db9a237b37e6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.361212] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e24bea4-8b47-443a-b134-98db1d36501e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.393665] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32cf7975-476d-4ea3-9294-46021bd31e68 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.400891] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3dde7686-1ee9-488c-aa3b-f9a2810b252d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.413850] env[61852]: DEBUG nova.compute.provider_tree [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 791.594308] env[61852]: DEBUG oslo_concurrency.lockutils [None req-704f0def-d933-44c4-a2a4-2453562538f2 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Lock "cb50d964-5c0e-4cf3-b652-0f7b7a488f91" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 110.121s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 791.618610] env[61852]: DEBUG nova.network.neutron [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.734434] env[61852]: DEBUG oslo_vmware.api [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Task: {'id': task-1292781, 'name': PowerOnVM_Task} progress is 90%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.736192] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "refresh_cache-c94066d5-2e5f-4059-bdc5-385d517f1d84" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 791.844066] env[61852]: DEBUG nova.network.neutron [req-166a3441-9e2f-4036-b648-7fd98cc39d6f req-c4d56271-d2a9-4c06-8dbc-b876f8cc0dbf service nova] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Instance cache missing network info. 
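Assuming Placement's usual capacity rule, the inventory entry above translates into schedulable capacity as (total - reserved) * allocation_ratio, consumed at most max_unit per single allocation. A small sketch using the exact figures from the log (min_unit and step_size, both 1 here, are omitted).

inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'max_unit': 16, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'max_unit': 138, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    # e.g. VCPU: (48 - 0) * 4.0 = 192 schedulable vCPUs on this provider.
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity {capacity:.0f}, at most {inv['max_unit']} per allocation")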
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 791.937200] env[61852]: DEBUG nova.network.neutron [req-166a3441-9e2f-4036-b648-7fd98cc39d6f req-c4d56271-d2a9-4c06-8dbc-b876f8cc0dbf service nova] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.956264] env[61852]: DEBUG nova.scheduler.client.report [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Updated inventory for provider f818062c-7b17-4bd0-94af-192a674543c3 with generation 81 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 791.956530] env[61852]: DEBUG nova.compute.provider_tree [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Updating resource provider f818062c-7b17-4bd0-94af-192a674543c3 generation from 81 to 82 during operation: update_inventory {{(pid=61852) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 791.956707] env[61852]: DEBUG nova.compute.provider_tree [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 792.097191] env[61852]: DEBUG nova.compute.manager [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 792.121346] env[61852]: INFO nova.compute.manager [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: b0433331-f005-49e0-bd22-bc78f970e3cd] Took 1.03 seconds to deallocate network for instance. [ 792.238578] env[61852]: DEBUG oslo_vmware.api [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Task: {'id': task-1292781, 'name': PowerOnVM_Task, 'duration_secs': 0.557233} completed successfully. 
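The set_inventory_for_provider entries above bump the provider generation from 81 to 82. The generation is Placement's optimistic-concurrency token: a writer sends the generation it last saw, and the write is refused if another writer bumped it first. An illustrative sketch; the class and method names are hypothetical.

class ResourceProvider:
    def __init__(self):
        self.generation = 81
        self.inventory = {}

    def set_inventory(self, inventory, generation):
        # Reject the write if the caller's view of the provider is stale;
        # the caller must refetch and retry.
        if generation != self.generation:
            raise ValueError('generation conflict, refetch and retry')
        self.inventory = inventory
        self.generation += 1  # every successful write bumps the generation
        return self.generation

rp = ResourceProvider()
print(rp.set_inventory({'VCPU': {'total': 48}}, generation=81))  # -> 82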
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.238578] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 792.238578] env[61852]: INFO nova.compute.manager [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Took 7.02 seconds to spawn the instance on the hypervisor. [ 792.238741] env[61852]: DEBUG nova.compute.manager [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 792.240169] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f5b16a-4b0b-4a9a-ba42-74b306b82eb2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.440723] env[61852]: DEBUG oslo_concurrency.lockutils [req-166a3441-9e2f-4036-b648-7fd98cc39d6f req-c4d56271-d2a9-4c06-8dbc-b876f8cc0dbf service nova] Releasing lock "refresh_cache-c94066d5-2e5f-4059-bdc5-385d517f1d84" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 792.441117] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquired lock "refresh_cache-c94066d5-2e5f-4059-bdc5-385d517f1d84" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.441293] env[61852]: DEBUG nova.network.neutron [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 792.462197] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.514s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.463171] env[61852]: DEBUG nova.compute.manager [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 792.466030] env[61852]: DEBUG oslo_concurrency.lockutils [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.916s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 792.467270] env[61852]: INFO nova.compute.claims [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 792.621009] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 792.761247] env[61852]: INFO nova.compute.manager [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Took 31.66 seconds to build instance. [ 792.971695] env[61852]: DEBUG nova.compute.utils [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 792.975083] env[61852]: DEBUG nova.compute.manager [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 792.975274] env[61852]: DEBUG nova.network.neutron [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 792.985186] env[61852]: DEBUG nova.network.neutron [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Instance cache missing network info. 
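The lockutils entries above ("acquired ... waited 16.916s", "released ... held 2.514s") trace a named lock serializing the resource-claim critical section. A stdlib-only sketch of that shape; oslo.concurrency's lockutils layers fair and inter-process variants on top of the same pattern.

import threading
import time

_locks = {}

def lock(name):
    # One shared lock object per name.
    return _locks.setdefault(name, threading.Lock())

def with_named_lock(name, fn):
    t0 = time.monotonic()
    with lock(name):
        # Report how long we queued for the lock, as in the log above.
        print(f'Lock "{name}" acquired :: waited {time.monotonic() - t0:.3f}s')
        t1 = time.monotonic()
        try:
            return fn()
        finally:
            print(f'Lock "{name}" released :: held {time.monotonic() - t1:.3f}s')

with_named_lock("compute_resources", lambda: time.sleep(0.01))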
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 793.008065] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8c2dca9f-23cf-4e8d-ad71-1d011fd28ed2 tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Acquiring lock "46ccab1f-b7af-49df-a38d-af1fa3bac486" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.051407] env[61852]: DEBUG nova.policy [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c998c34acada4242a24d89f525f2c810', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86cbc5d693b44d8586fe2136ad498997', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 793.133470] env[61852]: DEBUG nova.network.neutron [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Updating instance_info_cache with network_info: [{"id": "241fcec0-c8cc-40b0-bfbc-becb93226dc0", "address": "fa:16:3e:d2:e5:e7", "network": {"id": "5c538b43-cd66-41dd-b7f8-8d7f49060f2f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1279580713-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "783bc6968c91488293479f10b8dc92c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3e0aae3-33d1-403b-bfaf-306f77a1422e", "external-id": "nsx-vlan-transportzone-211", "segmentation_id": 211, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap241fcec0-c8", "ovs_interfaceid": "241fcec0-c8cc-40b0-bfbc-becb93226dc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.162549] env[61852]: INFO nova.scheduler.client.report [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Deleted allocations for instance b0433331-f005-49e0-bd22-bc78f970e3cd [ 793.267352] env[61852]: DEBUG oslo_concurrency.lockutils [None req-00aee4fe-df3a-484f-9008-c3667a394f9a tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Lock "46ccab1f-b7af-49df-a38d-af1fa3bac486" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 109.848s {{(pid=61852) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.268760] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8c2dca9f-23cf-4e8d-ad71-1d011fd28ed2 tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Lock "46ccab1f-b7af-49df-a38d-af1fa3bac486" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.261s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.269033] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8c2dca9f-23cf-4e8d-ad71-1d011fd28ed2 tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Acquiring lock "46ccab1f-b7af-49df-a38d-af1fa3bac486-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.269297] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8c2dca9f-23cf-4e8d-ad71-1d011fd28ed2 tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Lock "46ccab1f-b7af-49df-a38d-af1fa3bac486-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.269581] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8c2dca9f-23cf-4e8d-ad71-1d011fd28ed2 tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Lock "46ccab1f-b7af-49df-a38d-af1fa3bac486-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.272050] env[61852]: INFO nova.compute.manager [None req-8c2dca9f-23cf-4e8d-ad71-1d011fd28ed2 tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Terminating instance [ 793.274154] env[61852]: DEBUG nova.compute.manager [None req-8c2dca9f-23cf-4e8d-ad71-1d011fd28ed2 tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 793.274395] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-8c2dca9f-23cf-4e8d-ad71-1d011fd28ed2 tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 793.275245] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c5b02aa-9e73-44e3-8645-e89b9548bb4f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.286644] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c2dca9f-23cf-4e8d-ad71-1d011fd28ed2 tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 793.287606] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cd460d39-086b-44c0-a7b2-be780bfd5d3a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.296007] env[61852]: DEBUG oslo_vmware.api [None req-8c2dca9f-23cf-4e8d-ad71-1d011fd28ed2 tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Waiting for the task: (returnval){ [ 793.296007] env[61852]: value = "task-1292782" [ 793.296007] env[61852]: _type = "Task" [ 793.296007] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.305326] env[61852]: DEBUG oslo_vmware.api [None req-8c2dca9f-23cf-4e8d-ad71-1d011fd28ed2 tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Task: {'id': task-1292782, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.479019] env[61852]: DEBUG nova.compute.manager [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 793.596984] env[61852]: DEBUG nova.network.neutron [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Successfully created port: e7aa8d2a-5703-4b7d-9953-bbc015d805d5 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 793.639016] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Releasing lock "refresh_cache-c94066d5-2e5f-4059-bdc5-385d517f1d84" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 793.639016] env[61852]: DEBUG nova.compute.manager [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Instance network_info: |[{"id": "241fcec0-c8cc-40b0-bfbc-becb93226dc0", "address": "fa:16:3e:d2:e5:e7", "network": {"id": "5c538b43-cd66-41dd-b7f8-8d7f49060f2f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1279580713-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "783bc6968c91488293479f10b8dc92c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3e0aae3-33d1-403b-bfaf-306f77a1422e", "external-id": "nsx-vlan-transportzone-211", "segmentation_id": 211, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap241fcec0-c8", "ovs_interfaceid": "241fcec0-c8cc-40b0-bfbc-becb93226dc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 793.639658] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:e5:e7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c3e0aae3-33d1-403b-bfaf-306f77a1422e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '241fcec0-c8cc-40b0-bfbc-becb93226dc0', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 793.645396] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Creating folder: Project (783bc6968c91488293479f10b8dc92c1). Parent ref: group-v277280. 
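get_vif_info iterates the network_info structure dumped above to build the driver's VIF info list. A sketch walking that structure for each VIF's MAC and fixed IPs; only the fields actually read below are reproduced from the log entry.

network_info = [{
    "id": "241fcec0-c8cc-40b0-bfbc-becb93226dc0",
    "address": "fa:16:3e:d2:e5:e7",
    "network": {
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.14", "type": "fixed"}],
        }],
    },
}]

for vif in network_info:
    # Collect the fixed IPs across all subnets attached to this VIF.
    fixed = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"] if ip["type"] == "fixed"]
    print(vif["id"], vif["address"], fixed)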
{{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 793.648685] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5eb8efaa-d978-49bb-9c2b-9c443a2ffbe5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.661040] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Created folder: Project (783bc6968c91488293479f10b8dc92c1) in parent group-v277280. [ 793.661040] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Creating folder: Instances. Parent ref: group-v277313. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 793.661040] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eff16547-6422-41bf-9e58-f72163739d3f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.672235] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Created folder: Instances in parent group-v277313. [ 793.672235] env[61852]: DEBUG oslo.service.loopingcall [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 793.672235] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 793.672235] env[61852]: DEBUG oslo_concurrency.lockutils [None req-88694850-bf92-4a7d-b004-ab057096b5c1 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Lock "b0433331-f005-49e0-bd22-bc78f970e3cd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 126.397s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.672235] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d5a746bc-0765-498a-989a-ace1f68de5e6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.696061] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 793.696061] env[61852]: value = "task-1292785" [ 793.696061] env[61852]: _type = "Task" [ 793.696061] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.704917] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292785, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.773422] env[61852]: DEBUG nova.compute.manager [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 793.787746] env[61852]: DEBUG oslo_concurrency.lockutils [None req-30479fef-9b5a-4dd8-815e-3208f924c582 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Acquiring lock "cb50d964-5c0e-4cf3-b652-0f7b7a488f91" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.787986] env[61852]: DEBUG oslo_concurrency.lockutils [None req-30479fef-9b5a-4dd8-815e-3208f924c582 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Lock "cb50d964-5c0e-4cf3-b652-0f7b7a488f91" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.788354] env[61852]: DEBUG oslo_concurrency.lockutils [None req-30479fef-9b5a-4dd8-815e-3208f924c582 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Acquiring lock "cb50d964-5c0e-4cf3-b652-0f7b7a488f91-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.788548] env[61852]: DEBUG oslo_concurrency.lockutils [None req-30479fef-9b5a-4dd8-815e-3208f924c582 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Lock "cb50d964-5c0e-4cf3-b652-0f7b7a488f91-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.788716] env[61852]: DEBUG oslo_concurrency.lockutils [None req-30479fef-9b5a-4dd8-815e-3208f924c582 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Lock "cb50d964-5c0e-4cf3-b652-0f7b7a488f91-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.792211] env[61852]: INFO nova.compute.manager [None req-30479fef-9b5a-4dd8-815e-3208f924c582 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Terminating instance [ 793.797021] env[61852]: DEBUG nova.compute.manager [None req-30479fef-9b5a-4dd8-815e-3208f924c582 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 793.797021] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-30479fef-9b5a-4dd8-815e-3208f924c582 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 793.797021] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4347efc9-284c-4dc9-bad4-970c7d530598 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.807680] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-30479fef-9b5a-4dd8-815e-3208f924c582 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 793.807680] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3332eb86-4463-4e00-9fe6-5ce734da3272 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.811789] env[61852]: DEBUG oslo_vmware.api [None req-8c2dca9f-23cf-4e8d-ad71-1d011fd28ed2 tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Task: {'id': task-1292782, 'name': PowerOffVM_Task, 'duration_secs': 0.196139} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.812307] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c2dca9f-23cf-4e8d-ad71-1d011fd28ed2 tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 793.812479] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-8c2dca9f-23cf-4e8d-ad71-1d011fd28ed2 tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 793.813393] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7308e4d1-5ff2-403c-8123-436c036fd4a6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.816481] env[61852]: DEBUG oslo_vmware.api [None req-30479fef-9b5a-4dd8-815e-3208f924c582 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Waiting for the task: (returnval){ [ 793.816481] env[61852]: value = "task-1292786" [ 793.816481] env[61852]: _type = "Task" [ 793.816481] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.828740] env[61852]: DEBUG oslo_vmware.api [None req-30479fef-9b5a-4dd8-815e-3208f924c582 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Task: {'id': task-1292786, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.883836] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67ae2ebb-2baf-4a69-bc40-2d37630c1f53 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.893357] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-835f895f-9275-46e9-aae0-897625844ef3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.896765] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-8c2dca9f-23cf-4e8d-ad71-1d011fd28ed2 tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 793.897090] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-8c2dca9f-23cf-4e8d-ad71-1d011fd28ed2 tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Deleting contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 793.897288] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c2dca9f-23cf-4e8d-ad71-1d011fd28ed2 tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Deleting the datastore file [datastore2] 46ccab1f-b7af-49df-a38d-af1fa3bac486 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 793.897530] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f79d335a-1a6e-43b8-b0e1-e14746949f05 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.931567] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0db8e5e7-48b2-4040-a799-4ac6ec2f2f00 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.934622] env[61852]: DEBUG oslo_vmware.api [None req-8c2dca9f-23cf-4e8d-ad71-1d011fd28ed2 tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Waiting for the task: (returnval){ [ 793.934622] env[61852]: value = "task-1292788" [ 793.934622] env[61852]: _type = "Task" [ 793.934622] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.942406] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-987232f3-ca61-48d9-bcec-deb16e0c4cf1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.950160] env[61852]: DEBUG oslo_vmware.api [None req-8c2dca9f-23cf-4e8d-ad71-1d011fd28ed2 tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Task: {'id': task-1292788, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.959860] env[61852]: DEBUG nova.compute.provider_tree [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 794.110135] env[61852]: DEBUG oslo_concurrency.lockutils [None req-bbf093e5-76a9-4051-9aaa-4789b7ebb7c2 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Acquiring lock "d48cefda-0b05-4ec0-8c1d-bc25cd491faf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.110429] env[61852]: DEBUG oslo_concurrency.lockutils [None req-bbf093e5-76a9-4051-9aaa-4789b7ebb7c2 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Lock "d48cefda-0b05-4ec0-8c1d-bc25cd491faf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.110637] env[61852]: DEBUG oslo_concurrency.lockutils [None req-bbf093e5-76a9-4051-9aaa-4789b7ebb7c2 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Acquiring lock "d48cefda-0b05-4ec0-8c1d-bc25cd491faf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.110820] env[61852]: DEBUG oslo_concurrency.lockutils [None req-bbf093e5-76a9-4051-9aaa-4789b7ebb7c2 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Lock "d48cefda-0b05-4ec0-8c1d-bc25cd491faf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.110990] env[61852]: DEBUG oslo_concurrency.lockutils [None req-bbf093e5-76a9-4051-9aaa-4789b7ebb7c2 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Lock "d48cefda-0b05-4ec0-8c1d-bc25cd491faf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.113977] env[61852]: INFO nova.compute.manager [None req-bbf093e5-76a9-4051-9aaa-4789b7ebb7c2 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Terminating instance [ 794.116452] env[61852]: DEBUG nova.compute.manager [None req-bbf093e5-76a9-4051-9aaa-4789b7ebb7c2 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 794.116670] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-bbf093e5-76a9-4051-9aaa-4789b7ebb7c2 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 794.117730] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a44555b-483e-4931-92bc-983cecd622ce {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.125623] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbf093e5-76a9-4051-9aaa-4789b7ebb7c2 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 794.125870] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a4c4a5d4-d669-4d7a-b58d-fb945ee7e598 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.132301] env[61852]: DEBUG oslo_vmware.api [None req-bbf093e5-76a9-4051-9aaa-4789b7ebb7c2 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Waiting for the task: (returnval){ [ 794.132301] env[61852]: value = "task-1292789" [ 794.132301] env[61852]: _type = "Task" [ 794.132301] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.141087] env[61852]: DEBUG oslo_vmware.api [None req-bbf093e5-76a9-4051-9aaa-4789b7ebb7c2 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Task: {'id': task-1292789, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.192146] env[61852]: DEBUG nova.compute.manager [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 794.205345] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292785, 'name': CreateVM_Task, 'duration_secs': 0.397978} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.205707] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 794.206743] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 794.206962] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.207291] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 794.207729] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d9a49ee-4393-4f91-9ab4-d5752a816c5f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.212627] env[61852]: DEBUG oslo_vmware.api [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 794.212627] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52cb7b8b-b791-ca22-fc10-0e8fd85835da" [ 794.212627] env[61852]: _type = "Task" [ 794.212627] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.220679] env[61852]: DEBUG oslo_vmware.api [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52cb7b8b-b791-ca22-fc10-0e8fd85835da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.294248] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.326360] env[61852]: DEBUG oslo_vmware.api [None req-30479fef-9b5a-4dd8-815e-3208f924c582 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Task: {'id': task-1292786, 'name': PowerOffVM_Task, 'duration_secs': 0.234283} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.326676] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-30479fef-9b5a-4dd8-815e-3208f924c582 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 794.326896] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-30479fef-9b5a-4dd8-815e-3208f924c582 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 794.327242] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-221abca6-20be-4558-9010-ea27fb016577 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.397619] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-30479fef-9b5a-4dd8-815e-3208f924c582 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 794.397829] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-30479fef-9b5a-4dd8-815e-3208f924c582 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 794.398016] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-30479fef-9b5a-4dd8-815e-3208f924c582 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Deleting the datastore file [datastore1] cb50d964-5c0e-4cf3-b652-0f7b7a488f91 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 794.398292] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-507ff003-0289-4b34-b7b8-9fd275fd1fd3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.403857] env[61852]: DEBUG oslo_vmware.api [None req-30479fef-9b5a-4dd8-815e-3208f924c582 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Waiting for the task: (returnval){ [ 794.403857] env[61852]: value = "task-1292791" [ 794.403857] env[61852]: _type = "Task" [ 794.403857] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.411679] env[61852]: DEBUG oslo_vmware.api [None req-30479fef-9b5a-4dd8-815e-3208f924c582 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Task: {'id': task-1292791, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.443780] env[61852]: DEBUG oslo_vmware.api [None req-8c2dca9f-23cf-4e8d-ad71-1d011fd28ed2 tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Task: {'id': task-1292788, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148784} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.444097] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c2dca9f-23cf-4e8d-ad71-1d011fd28ed2 tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 794.444311] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-8c2dca9f-23cf-4e8d-ad71-1d011fd28ed2 tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Deleted contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 794.444505] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-8c2dca9f-23cf-4e8d-ad71-1d011fd28ed2 tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 794.444680] env[61852]: INFO nova.compute.manager [None req-8c2dca9f-23cf-4e8d-ad71-1d011fd28ed2 tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Took 1.17 seconds to destroy the instance on the hypervisor. [ 794.445236] env[61852]: DEBUG oslo.service.loopingcall [None req-8c2dca9f-23cf-4e8d-ad71-1d011fd28ed2 tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 794.445341] env[61852]: DEBUG nova.compute.manager [-] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 794.445496] env[61852]: DEBUG nova.network.neutron [-] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 794.463971] env[61852]: DEBUG nova.scheduler.client.report [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 794.490505] env[61852]: DEBUG nova.compute.manager [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 794.515839] env[61852]: DEBUG nova.virt.hardware [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 794.516091] env[61852]: DEBUG nova.virt.hardware [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 794.516249] env[61852]: DEBUG nova.virt.hardware [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 794.516428] env[61852]: DEBUG nova.virt.hardware [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 794.516573] env[61852]: DEBUG nova.virt.hardware [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 794.516715] env[61852]: DEBUG nova.virt.hardware [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 794.516981] env[61852]: DEBUG nova.virt.hardware [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 794.517176] env[61852]: DEBUG nova.virt.hardware [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 794.517343] env[61852]: DEBUG 
nova.virt.hardware [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 794.517503] env[61852]: DEBUG nova.virt.hardware [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 794.517672] env[61852]: DEBUG nova.virt.hardware [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 794.518572] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c440eb7-0744-4147-9a9f-7f7608d12eb1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.529491] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-236ed5c6-d435-4329-ba53-b1dbd5dceb67 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.641959] env[61852]: DEBUG oslo_vmware.api [None req-bbf093e5-76a9-4051-9aaa-4789b7ebb7c2 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Task: {'id': task-1292789, 'name': PowerOffVM_Task, 'duration_secs': 0.342542} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.642804] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbf093e5-76a9-4051-9aaa-4789b7ebb7c2 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 794.642804] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-bbf093e5-76a9-4051-9aaa-4789b7ebb7c2 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 794.643149] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dcc2092c-591a-4477-9b4c-f67b591f67c1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.699909] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-bbf093e5-76a9-4051-9aaa-4789b7ebb7c2 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 794.700143] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-bbf093e5-76a9-4051-9aaa-4789b7ebb7c2 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 794.700399] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbf093e5-76a9-4051-9aaa-4789b7ebb7c2 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Deleting the datastore file [datastore1] d48cefda-0b05-4ec0-8c1d-bc25cd491faf {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 794.702586] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-88020a65-cb61-480d-8f38-a87f95e4f012 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.709555] env[61852]: DEBUG oslo_vmware.api [None req-bbf093e5-76a9-4051-9aaa-4789b7ebb7c2 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Waiting for the task: (returnval){ [ 794.709555] env[61852]: value = "task-1292793" [ 794.709555] env[61852]: _type = "Task" [ 794.709555] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.719054] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.723701] env[61852]: DEBUG oslo_vmware.api [None req-bbf093e5-76a9-4051-9aaa-4789b7ebb7c2 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Task: {'id': task-1292793, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.727611] env[61852]: DEBUG oslo_vmware.api [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52cb7b8b-b791-ca22-fc10-0e8fd85835da, 'name': SearchDatastore_Task, 'duration_secs': 0.010632} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.727910] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 794.728180] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 794.728450] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 794.728606] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.728811] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 794.729132] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a11b0137-3fa5-48a3-abf2-dab570ddf6d0 {{(pid=61852) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.738048] env[61852]: DEBUG nova.compute.manager [req-3df10dbf-2e85-4fd8-949a-d8b65f531d94 req-7bcc7775-5484-49e9-99c3-049a950187ca service nova] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Received event network-vif-deleted-94d9d6be-2b90-477a-a1a2-f04ffbdabc54 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 794.738655] env[61852]: INFO nova.compute.manager [req-3df10dbf-2e85-4fd8-949a-d8b65f531d94 req-7bcc7775-5484-49e9-99c3-049a950187ca service nova] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Neutron deleted interface 94d9d6be-2b90-477a-a1a2-f04ffbdabc54; detaching it from the instance and deleting it from the info cache [ 794.738655] env[61852]: DEBUG nova.network.neutron [req-3df10dbf-2e85-4fd8-949a-d8b65f531d94 req-7bcc7775-5484-49e9-99c3-049a950187ca service nova] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.741110] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 794.741321] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 794.742296] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3380e7e8-8b3a-4ecf-bb7e-219e2f440575 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.748651] env[61852]: DEBUG oslo_vmware.api [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 794.748651] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]526ce7ef-f195-e22a-7726-0c69dfa5bb50" [ 794.748651] env[61852]: _type = "Task" [ 794.748651] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.758647] env[61852]: DEBUG oslo_vmware.api [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]526ce7ef-f195-e22a-7726-0c69dfa5bb50, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.920802] env[61852]: DEBUG oslo_vmware.api [None req-30479fef-9b5a-4dd8-815e-3208f924c582 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Task: {'id': task-1292791, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.124924} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.921182] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-30479fef-9b5a-4dd8-815e-3208f924c582 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 794.921519] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-30479fef-9b5a-4dd8-815e-3208f924c582 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 794.921754] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-30479fef-9b5a-4dd8-815e-3208f924c582 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 794.921972] env[61852]: INFO nova.compute.manager [None req-30479fef-9b5a-4dd8-815e-3208f924c582 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Took 1.13 seconds to destroy the instance on the hypervisor. [ 794.922271] env[61852]: DEBUG oslo.service.loopingcall [None req-30479fef-9b5a-4dd8-815e-3208f924c582 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 794.922507] env[61852]: DEBUG nova.compute.manager [-] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 794.922777] env[61852]: DEBUG nova.network.neutron [-] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 794.968812] env[61852]: DEBUG oslo_concurrency.lockutils [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.503s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.969166] env[61852]: DEBUG nova.compute.manager [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 794.972297] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.437s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.973823] env[61852]: INFO nova.compute.claims [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 795.216743] env[61852]: DEBUG nova.network.neutron [-] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.228348] env[61852]: DEBUG oslo_vmware.api [None req-bbf093e5-76a9-4051-9aaa-4789b7ebb7c2 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Task: {'id': task-1292793, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128387} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.228498] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbf093e5-76a9-4051-9aaa-4789b7ebb7c2 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 795.229087] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-bbf093e5-76a9-4051-9aaa-4789b7ebb7c2 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 795.229170] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-bbf093e5-76a9-4051-9aaa-4789b7ebb7c2 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 795.229945] env[61852]: INFO nova.compute.manager [None req-bbf093e5-76a9-4051-9aaa-4789b7ebb7c2 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Took 1.11 seconds to destroy the instance on the hypervisor. [ 795.230236] env[61852]: DEBUG oslo.service.loopingcall [None req-bbf093e5-76a9-4051-9aaa-4789b7ebb7c2 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 795.230432] env[61852]: DEBUG nova.compute.manager [-] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 795.233018] env[61852]: DEBUG nova.network.neutron [-] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 795.244815] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8c987ea1-136c-4b36-9d06-da1f406df438 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.260068] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0305decc-c973-4301-a724-53b4ac757fd5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.276309] env[61852]: DEBUG oslo_vmware.api [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]526ce7ef-f195-e22a-7726-0c69dfa5bb50, 'name': SearchDatastore_Task, 'duration_secs': 0.01009} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.277087] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03c36c1c-0fee-4aae-864d-7bb1823c62f8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.283923] env[61852]: DEBUG nova.compute.manager [req-3df10dbf-2e85-4fd8-949a-d8b65f531d94 req-7bcc7775-5484-49e9-99c3-049a950187ca service nova] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Detach interface failed, port_id=94d9d6be-2b90-477a-a1a2-f04ffbdabc54, reason: Instance 46ccab1f-b7af-49df-a38d-af1fa3bac486 could not be found. 
{{(pid=61852) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 795.290021] env[61852]: DEBUG nova.compute.manager [req-161335eb-211b-4bed-95b8-1db026d37dd6 req-fade3b9e-5907-423e-bc34-6be3a78473dc service nova] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Received event network-vif-plugged-e7aa8d2a-5703-4b7d-9953-bbc015d805d5 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 795.290021] env[61852]: DEBUG oslo_concurrency.lockutils [req-161335eb-211b-4bed-95b8-1db026d37dd6 req-fade3b9e-5907-423e-bc34-6be3a78473dc service nova] Acquiring lock "f18906e9-67b3-4537-9169-9d275e2ec4e4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 795.290021] env[61852]: DEBUG oslo_concurrency.lockutils [req-161335eb-211b-4bed-95b8-1db026d37dd6 req-fade3b9e-5907-423e-bc34-6be3a78473dc service nova] Lock "f18906e9-67b3-4537-9169-9d275e2ec4e4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.290021] env[61852]: DEBUG oslo_concurrency.lockutils [req-161335eb-211b-4bed-95b8-1db026d37dd6 req-fade3b9e-5907-423e-bc34-6be3a78473dc service nova] Lock "f18906e9-67b3-4537-9169-9d275e2ec4e4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.290021] env[61852]: DEBUG nova.compute.manager [req-161335eb-211b-4bed-95b8-1db026d37dd6 req-fade3b9e-5907-423e-bc34-6be3a78473dc service nova] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] No waiting events found dispatching network-vif-plugged-e7aa8d2a-5703-4b7d-9953-bbc015d805d5 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 795.290263] env[61852]: WARNING nova.compute.manager [req-161335eb-211b-4bed-95b8-1db026d37dd6 req-fade3b9e-5907-423e-bc34-6be3a78473dc service nova] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Received unexpected event network-vif-plugged-e7aa8d2a-5703-4b7d-9953-bbc015d805d5 for instance with vm_state building and task_state spawning. [ 795.292463] env[61852]: DEBUG oslo_vmware.api [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 795.292463] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52f461d6-5454-e778-48c0-7964cbc523ad" [ 795.292463] env[61852]: _type = "Task" [ 795.292463] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.301868] env[61852]: DEBUG oslo_vmware.api [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52f461d6-5454-e778-48c0-7964cbc523ad, 'name': SearchDatastore_Task, 'duration_secs': 0.008745} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.302126] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 795.302377] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] c94066d5-2e5f-4059-bdc5-385d517f1d84/c94066d5-2e5f-4059-bdc5-385d517f1d84.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 795.302624] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-32133d6c-53fd-4cfd-908a-122d1e86f5b0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.311566] env[61852]: DEBUG oslo_vmware.api [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 795.311566] env[61852]: value = "task-1292794" [ 795.311566] env[61852]: _type = "Task" [ 795.311566] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.321082] env[61852]: DEBUG oslo_vmware.api [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1292794, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.419784] env[61852]: DEBUG nova.network.neutron [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Successfully updated port: e7aa8d2a-5703-4b7d-9953-bbc015d805d5 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 795.478764] env[61852]: DEBUG nova.compute.utils [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 795.482164] env[61852]: DEBUG nova.compute.manager [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 795.482699] env[61852]: DEBUG nova.network.neutron [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 795.546321] env[61852]: DEBUG nova.policy [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c998c34acada4242a24d89f525f2c810', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86cbc5d693b44d8586fe2136ad498997', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 795.680188] env[61852]: DEBUG nova.network.neutron [-] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.728396] env[61852]: INFO nova.compute.manager [-] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Took 1.28 seconds to deallocate network for instance. [ 795.821543] env[61852]: DEBUG oslo_vmware.api [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1292794, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.468397} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.821752] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] c94066d5-2e5f-4059-bdc5-385d517f1d84/c94066d5-2e5f-4059-bdc5-385d517f1d84.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 795.822029] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 795.822334] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a2ea3b6b-9744-4798-84a0-4e3808345e4f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.824929] env[61852]: DEBUG nova.network.neutron [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Successfully created port: 9a926e1c-a6f1-408c-84f3-dfb08cb0464c {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 795.831945] env[61852]: DEBUG oslo_vmware.api [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 795.831945] env[61852]: value = "task-1292795" [ 795.831945] env[61852]: _type = "Task" [ 795.831945] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.839680] env[61852]: DEBUG oslo_vmware.api [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1292795, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.921323] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Acquiring lock "refresh_cache-f18906e9-67b3-4537-9169-9d275e2ec4e4" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 795.921323] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Acquired lock "refresh_cache-f18906e9-67b3-4537-9169-9d275e2ec4e4" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.921323] env[61852]: DEBUG nova.network.neutron [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 795.978164] env[61852]: DEBUG nova.network.neutron [-] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.985795] env[61852]: DEBUG nova.compute.manager [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 796.184390] env[61852]: INFO nova.compute.manager [-] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Took 1.26 seconds to deallocate network for instance. 
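The `Acquiring lock` / `Acquired lock` / `Releasing lock` triplets around `refresh_cache-<uuid>` above come from oslo.concurrency's named locks, which serialize network-info cache refreshes per instance. A minimal sketch of that pattern, assuming the default in-process lock; the UUID and the work inside the critical section are illustrative:

```python
# Minimal sketch of the named-lock pattern behind the Acquiring/Acquired/
# Releasing 'refresh_cache-<uuid>' lines above (lockutils.py:310/313/331).
from oslo_concurrency import lockutils

instance_uuid = "f18906e9-67b3-4537-9169-9d275e2ec4e4"

# lockutils.lock() returns a context manager; entering it emits the
# "Acquiring"/"Acquired" DEBUG lines and exiting emits "Releasing",
# matching the trace above.
with lockutils.lock("refresh_cache-%s" % instance_uuid):
    # Critical section: only one greenthread per process refreshes this
    # instance's network-info cache at a time.
    pass  # placeholder for the real cache-refresh work
```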
[ 796.238196] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8c2dca9f-23cf-4e8d-ad71-1d011fd28ed2 tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.272844] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-215206ab-3834-4bb6-b076-06e26e0a9983 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.279906] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3c36bb3-c00b-452c-9137-ef04a9e04000 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.309465] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf70e9d5-9ce2-4233-b497-a105d7e11b5d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.316052] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27d8240a-f289-4c54-9ccb-dc44e15b1868 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.328945] env[61852]: DEBUG nova.compute.provider_tree [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 796.340015] env[61852]: DEBUG oslo_vmware.api [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1292795, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060222} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.340290] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 796.341046] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e73437-11a8-47f3-b6f3-a1a4134d2b07 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.362392] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Reconfiguring VM instance instance-00000038 to attach disk [datastore2] c94066d5-2e5f-4059-bdc5-385d517f1d84/c94066d5-2e5f-4059-bdc5-385d517f1d84.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 796.362864] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc8ae300-372f-4db3-bf76-afb3d9cb7b45 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.382618] env[61852]: DEBUG oslo_vmware.api [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 796.382618] env[61852]: value = "task-1292796" [ 796.382618] env[61852]: _type = "Task" [ 796.382618] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.390104] env[61852]: DEBUG oslo_vmware.api [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1292796, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.453920] env[61852]: DEBUG nova.network.neutron [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 796.480885] env[61852]: INFO nova.compute.manager [-] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Took 1.25 seconds to deallocate network for instance. 
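The `ExtendVirtualDisk_Task` / `ReconfigVM_Task` entries above follow oslo.vmware's invoke-then-poll pattern: a SOAP method is invoked, a task reference comes back, and the task is polled until the `completed successfully` line. A hedged sketch of that flow; the vCenter host and credentials are placeholders, the disk path and the 1048576 KB (1 GiB) target size are taken from the log, and Nova's real call in vm_util.py additionally passes a datacenter managed-object reference:

```python
# Sketch of the invoke-then-poll pattern behind the ExtendVirtualDisk_Task
# lines above; host/credentials are placeholders.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    "vc1.example.test", "user", "secret",
    api_retry_count=10, task_poll_interval=0.5)

disk_mgr = session.vim.service_content.virtualDiskManager
# invoke_api() issues the SOAP call ("Invoking
# VirtualDiskManager.ExtendVirtualDisk_Task" above) and returns a task ref.
task = session.invoke_api(
    session.vim, "ExtendVirtualDisk_Task", disk_mgr,
    name="[datastore2] c94066d5-2e5f-4059-bdc5-385d517f1d84/"
         "c94066d5-2e5f-4059-bdc5-385d517f1d84.vmdk",
    newCapacityKb=1048576, eagerZero=False)
# wait_for_task() polls the task (the "progress is 0%" /
# "completed successfully" lines) and raises if the task errors out.
session.wait_for_task(task)
```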
[ 796.619023] env[61852]: DEBUG nova.network.neutron [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Updating instance_info_cache with network_info: [{"id": "e7aa8d2a-5703-4b7d-9953-bbc015d805d5", "address": "fa:16:3e:bb:59:cc", "network": {"id": "8b69c186-4a11-44d5-871d-fefb7cc45cb8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.178", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f81b609db0954f0a9e9474a2fd875f0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90328c7b-15c4-4742-805b-755248d67029", "external-id": "nsx-vlan-transportzone-860", "segmentation_id": 860, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape7aa8d2a-57", "ovs_interfaceid": "e7aa8d2a-5703-4b7d-9953-bbc015d805d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.689394] env[61852]: DEBUG oslo_concurrency.lockutils [None req-30479fef-9b5a-4dd8-815e-3208f924c582 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.775061] env[61852]: DEBUG nova.compute.manager [req-ccea3593-40ef-4e0c-aeb4-a355c7c5191e req-5b56ee00-d0ec-4d8f-95c0-ced130cd9a1d service nova] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Received event network-vif-deleted-1f3009be-a3c0-4ce3-b287-2d744cee79c4 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 796.775246] env[61852]: DEBUG nova.compute.manager [req-ccea3593-40ef-4e0c-aeb4-a355c7c5191e req-5b56ee00-d0ec-4d8f-95c0-ced130cd9a1d service nova] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Received event network-vif-deleted-916e36f2-cfed-41bf-bad3-33d92e9ef290 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 796.861546] env[61852]: DEBUG nova.scheduler.client.report [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Updated inventory for provider f818062c-7b17-4bd0-94af-192a674543c3 with generation 82 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 796.861818] env[61852]: DEBUG nova.compute.provider_tree [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff 
tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Updating resource provider f818062c-7b17-4bd0-94af-192a674543c3 generation from 82 to 83 during operation: update_inventory {{(pid=61852) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 796.862047] env[61852]: DEBUG nova.compute.provider_tree [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 796.892548] env[61852]: DEBUG oslo_vmware.api [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1292796, 'name': ReconfigVM_Task, 'duration_secs': 0.329274} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.892850] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Reconfigured VM instance instance-00000038 to attach disk [datastore2] c94066d5-2e5f-4059-bdc5-385d517f1d84/c94066d5-2e5f-4059-bdc5-385d517f1d84.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 796.893472] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-685de55b-15e2-4212-9de3-5c610c90a06b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.900802] env[61852]: DEBUG oslo_vmware.api [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 796.900802] env[61852]: value = "task-1292797" [ 796.900802] env[61852]: _type = "Task" [ 796.900802] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.910020] env[61852]: DEBUG oslo_vmware.api [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1292797, 'name': Rename_Task} progress is 5%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.987250] env[61852]: DEBUG oslo_concurrency.lockutils [None req-bbf093e5-76a9-4051-9aaa-4789b7ebb7c2 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.999652] env[61852]: DEBUG nova.compute.manager [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 797.025801] env[61852]: DEBUG nova.virt.hardware [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 797.026071] env[61852]: DEBUG nova.virt.hardware [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 797.026233] env[61852]: DEBUG nova.virt.hardware [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 797.026411] env[61852]: DEBUG nova.virt.hardware [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 797.026555] env[61852]: DEBUG nova.virt.hardware [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 797.026698] env[61852]: DEBUG nova.virt.hardware [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 797.026902] env[61852]: DEBUG nova.virt.hardware [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 797.027283] env[61852]: DEBUG nova.virt.hardware [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 797.027488] env[61852]: DEBUG nova.virt.hardware [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 797.027653] env[61852]: DEBUG nova.virt.hardware [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 797.027826] env[61852]: DEBUG nova.virt.hardware [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 797.028692] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25904661-0fd5-4412-af1c-7e05c0c9e3cd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.037471] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acac0bb2-64fe-4f58-86a8-8db02b71865d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.125494] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Releasing lock "refresh_cache-f18906e9-67b3-4537-9169-9d275e2ec4e4" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 797.125853] env[61852]: DEBUG nova.compute.manager [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Instance network_info: |[{"id": "e7aa8d2a-5703-4b7d-9953-bbc015d805d5", "address": "fa:16:3e:bb:59:cc", "network": {"id": "8b69c186-4a11-44d5-871d-fefb7cc45cb8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.178", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, 
"tenant_id": "f81b609db0954f0a9e9474a2fd875f0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90328c7b-15c4-4742-805b-755248d67029", "external-id": "nsx-vlan-transportzone-860", "segmentation_id": 860, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape7aa8d2a-57", "ovs_interfaceid": "e7aa8d2a-5703-4b7d-9953-bbc015d805d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 797.126295] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bb:59:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '90328c7b-15c4-4742-805b-755248d67029', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e7aa8d2a-5703-4b7d-9953-bbc015d805d5', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 797.134587] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Creating folder: Project (86cbc5d693b44d8586fe2136ad498997). Parent ref: group-v277280. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 797.134902] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-32f7d3ea-f7c1-4ebf-a9f6-6c966823db93 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.146422] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Created folder: Project (86cbc5d693b44d8586fe2136ad498997) in parent group-v277280. [ 797.146610] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Creating folder: Instances. Parent ref: group-v277316. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 797.146916] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b961099b-3a96-4110-81d9-5e880dab69fa {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.155891] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Created folder: Instances in parent group-v277316. [ 797.156134] env[61852]: DEBUG oslo.service.loopingcall [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 797.156323] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 797.156524] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-288ec562-a447-4c41-922d-e2e268ab5a77 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.177338] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 797.177338] env[61852]: value = "task-1292800" [ 797.177338] env[61852]: _type = "Task" [ 797.177338] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.185163] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292800, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.318587] env[61852]: DEBUG nova.compute.manager [req-f7f511bd-f808-47d1-8baf-1dd99ebc582b req-d6fd21ec-9755-4761-be25-c405a231915f service nova] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Received event network-changed-e7aa8d2a-5703-4b7d-9953-bbc015d805d5 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 797.318792] env[61852]: DEBUG nova.compute.manager [req-f7f511bd-f808-47d1-8baf-1dd99ebc582b req-d6fd21ec-9755-4761-be25-c405a231915f service nova] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Refreshing instance network info cache due to event network-changed-e7aa8d2a-5703-4b7d-9953-bbc015d805d5. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 797.319020] env[61852]: DEBUG oslo_concurrency.lockutils [req-f7f511bd-f808-47d1-8baf-1dd99ebc582b req-d6fd21ec-9755-4761-be25-c405a231915f service nova] Acquiring lock "refresh_cache-f18906e9-67b3-4537-9169-9d275e2ec4e4" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 797.319162] env[61852]: DEBUG oslo_concurrency.lockutils [req-f7f511bd-f808-47d1-8baf-1dd99ebc582b req-d6fd21ec-9755-4761-be25-c405a231915f service nova] Acquired lock "refresh_cache-f18906e9-67b3-4537-9169-9d275e2ec4e4" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.319313] env[61852]: DEBUG nova.network.neutron [req-f7f511bd-f808-47d1-8baf-1dd99ebc582b req-d6fd21ec-9755-4761-be25-c405a231915f service nova] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Refreshing network info cache for port e7aa8d2a-5703-4b7d-9953-bbc015d805d5 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 797.351339] env[61852]: DEBUG nova.network.neutron [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Successfully updated port: 9a926e1c-a6f1-408c-84f3-dfb08cb0464c {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 797.367259] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.395s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.367787] env[61852]: DEBUG nova.compute.manager [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 797.370208] env[61852]: DEBUG oslo_concurrency.lockutils [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.365s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.373525] env[61852]: INFO nova.compute.claims [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 797.413819] env[61852]: DEBUG oslo_vmware.api [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1292797, 'name': Rename_Task, 'duration_secs': 0.142737} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.413819] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 797.413819] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-43859e5e-6f3e-4341-9caf-d8f4691b48ae {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.421644] env[61852]: DEBUG oslo_vmware.api [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 797.421644] env[61852]: value = "task-1292801" [ 797.421644] env[61852]: _type = "Task" [ 797.421644] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.430039] env[61852]: DEBUG oslo_vmware.api [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1292801, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.687160] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292800, 'name': CreateVM_Task, 'duration_secs': 0.389105} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.687335] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 797.687989] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 797.688241] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.688564] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 797.688800] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecb44079-3067-43a6-a5d0-faf26777d8b0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.692816] env[61852]: DEBUG oslo_vmware.api [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){ [ 797.692816] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52c3113f-e7df-80b5-12f7-82acc1e8165c" [ 797.692816] env[61852]: _type = "Task" [ 797.692816] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.699890] env[61852]: DEBUG oslo_vmware.api [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52c3113f-e7df-80b5-12f7-82acc1e8165c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.854478] env[61852]: DEBUG oslo_concurrency.lockutils [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Acquiring lock "refresh_cache-8897a654-6805-45b0-b12b-16f7981d33ad" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 797.854636] env[61852]: DEBUG oslo_concurrency.lockutils [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Acquired lock "refresh_cache-8897a654-6805-45b0-b12b-16f7981d33ad" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.854798] env[61852]: DEBUG nova.network.neutron [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 797.877599] env[61852]: DEBUG nova.compute.utils [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 797.880792] env[61852]: DEBUG nova.compute.manager [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 797.880944] env[61852]: DEBUG nova.network.neutron [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 797.931127] env[61852]: DEBUG oslo_vmware.api [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1292801, 'name': PowerOnVM_Task, 'duration_secs': 0.461895} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.931477] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 797.931593] env[61852]: INFO nova.compute.manager [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Took 7.95 seconds to spawn the instance on the hypervisor. 
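The inventory payloads reported for provider f818062c-7b17-4bd0-94af-192a674543c3 (VCPU total 48 at allocation_ratio 4.0, MEMORY_MB 196590 with 512 reserved, DISK_GB 400) translate into schedulable capacity via Placement's formula capacity = (total - reserved) * allocation_ratio. A quick check with the logged numbers:

```python
# Schedulable capacity implied by the inventory payloads logged above,
# using Placement's formula: capacity = (total - reserved) * allocation_ratio.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(rc, capacity)
# -> VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```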
[ 797.931798] env[61852]: DEBUG nova.compute.manager [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 797.932641] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9fb4e3e-ef84-474d-ad53-180b686da04f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.936304] env[61852]: DEBUG nova.policy [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ac73e3c1bec549918611a84413cf7ac7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bfc80b8d433b4f6386a3c9a133f2164a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 798.046226] env[61852]: DEBUG nova.network.neutron [req-f7f511bd-f808-47d1-8baf-1dd99ebc582b req-d6fd21ec-9755-4761-be25-c405a231915f service nova] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Updated VIF entry in instance network info cache for port e7aa8d2a-5703-4b7d-9953-bbc015d805d5. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 798.046424] env[61852]: DEBUG nova.network.neutron [req-f7f511bd-f808-47d1-8baf-1dd99ebc582b req-d6fd21ec-9755-4761-be25-c405a231915f service nova] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Updating instance_info_cache with network_info: [{"id": "e7aa8d2a-5703-4b7d-9953-bbc015d805d5", "address": "fa:16:3e:bb:59:cc", "network": {"id": "8b69c186-4a11-44d5-871d-fefb7cc45cb8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.178", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f81b609db0954f0a9e9474a2fd875f0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90328c7b-15c4-4742-805b-755248d67029", "external-id": "nsx-vlan-transportzone-860", "segmentation_id": 860, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape7aa8d2a-57", "ovs_interfaceid": "e7aa8d2a-5703-4b7d-9953-bbc015d805d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.203864] env[61852]: DEBUG oslo_vmware.api [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52c3113f-e7df-80b5-12f7-82acc1e8165c, 'name': SearchDatastore_Task, 'duration_secs': 0.009448} 
completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.204210] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 798.204447] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 798.204688] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 798.204834] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.205025] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 798.205315] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-507adf20-f311-4c9e-a0a3-69a504f5f7f2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.215375] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 798.215451] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 798.216201] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a9f44fa-4140-455a-b289-c793ac6cfe12 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.222153] env[61852]: DEBUG oslo_vmware.api [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){ [ 798.222153] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52350fde-8bb4-8ff6-a40d-16c9f08fcad3" [ 798.222153] env[61852]: _type = "Task" [ 798.222153] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.230438] env[61852]: DEBUG oslo_vmware.api [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52350fde-8bb4-8ff6-a40d-16c9f08fcad3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.243848] env[61852]: DEBUG nova.network.neutron [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Successfully created port: 3d6f661c-c36f-4b84-b1ed-6b0388986c2d {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 798.386847] env[61852]: DEBUG nova.compute.manager [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 798.399235] env[61852]: DEBUG nova.network.neutron [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 798.460830] env[61852]: INFO nova.compute.manager [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Took 32.00 seconds to build instance. 
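The per-image lock / SearchDatastore_Task / CopyVirtualDisk_Task sequence traced above is the vmwareapi driver's copy-on-first-boot image cache: serialize on the cached VMDK path, fetch the image into devstack-image-cache_base only if the datastore search finds nothing, then copy the cached disk out for the new instance. A condensed sketch of that structure; the three helpers are illustrative stubs for the SOAP calls in the log, not real driver functions:

```python
# Condensed sketch of the image-cache flow above. Only the locking and
# copy ordering are taken from the trace; the helpers are stubs.
from oslo_concurrency import lockutils

IMAGE_ID = "90fd8f39-16b3-43e0-a682-0ec131005e31"
CACHE_VMDK = ("[datastore2] devstack-image-cache_base/"
              f"{IMAGE_ID}/{IMAGE_ID}.vmdk")

def vmdk_exists(session, path):
    """Stub for HostDatastoreBrowser.SearchDatastore_Task."""

def download_image_to_cache(session, image_id):
    """Stub for streaming the Glance image into the cache directory."""

def copy_virtual_disk(session, src, dest):
    """Stub for VirtualDiskManager.CopyVirtualDisk_Task."""

def fetch_image_if_missing(session, instance_uuid):
    # Same lock name as the 'Acquiring lock "[datastore2]
    # devstack-image-cache_base/..."' lines: one fetch per image at a time.
    with lockutils.lock(CACHE_VMDK):
        if not vmdk_exists(session, CACHE_VMDK):
            download_image_to_cache(session, IMAGE_ID)
    dest = f"[datastore2] {instance_uuid}/{instance_uuid}.vmdk"
    copy_virtual_disk(session, CACHE_VMDK, dest)
```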
[ 798.550346] env[61852]: DEBUG oslo_concurrency.lockutils [req-f7f511bd-f808-47d1-8baf-1dd99ebc582b req-d6fd21ec-9755-4761-be25-c405a231915f service nova] Releasing lock "refresh_cache-f18906e9-67b3-4537-9169-9d275e2ec4e4" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 798.610521] env[61852]: DEBUG nova.network.neutron [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Updating instance_info_cache with network_info: [{"id": "9a926e1c-a6f1-408c-84f3-dfb08cb0464c", "address": "fa:16:3e:75:cf:e5", "network": {"id": "8b69c186-4a11-44d5-871d-fefb7cc45cb8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f81b609db0954f0a9e9474a2fd875f0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90328c7b-15c4-4742-805b-755248d67029", "external-id": "nsx-vlan-transportzone-860", "segmentation_id": 860, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a926e1c-a6", "ovs_interfaceid": "9a926e1c-a6f1-408c-84f3-dfb08cb0464c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.729178] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-279fdddd-e21e-4ad3-9cf8-e11483706d46 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.738059] env[61852]: DEBUG oslo_vmware.api [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52350fde-8bb4-8ff6-a40d-16c9f08fcad3, 'name': SearchDatastore_Task, 'duration_secs': 0.014128} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.740531] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15d41885-fb9b-4c0b-b647-efb00e5e6b22 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.743489] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c307a245-dcd5-4fc2-a74d-ebecc4e169db {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.749361] env[61852]: DEBUG oslo_vmware.api [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){ [ 798.749361] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52075636-17dd-8ed0-ba96-90b24d57b53f" [ 798.749361] env[61852]: _type = "Task" [ 798.749361] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.779586] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8210e722-ca5a-43cc-8488-2235cb1ebce4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.785693] env[61852]: DEBUG oslo_vmware.api [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52075636-17dd-8ed0-ba96-90b24d57b53f, 'name': SearchDatastore_Task, 'duration_secs': 0.015734} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.786308] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 798.786537] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] f18906e9-67b3-4537-9169-9d275e2ec4e4/f18906e9-67b3-4537-9169-9d275e2ec4e4.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 798.786775] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4b288dfa-bae0-4f3b-89d4-9e162808f337 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.792316] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d1dc78-daa1-40b1-94b9-9631d95dab47 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.796827] env[61852]: DEBUG oslo_vmware.api [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){ [ 798.796827] env[61852]: value = "task-1292802" [ 798.796827] env[61852]: _type = "Task" [ 798.796827] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.808484] env[61852]: DEBUG nova.compute.provider_tree [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 798.810577] env[61852]: DEBUG nova.compute.manager [req-2eb3185c-2670-4d32-a9d5-eb3831d51ff5 req-1b06af5f-0ea0-4d29-abc8-43fadefd13b7 service nova] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Received event network-vif-plugged-9a926e1c-a6f1-408c-84f3-dfb08cb0464c {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 798.810794] env[61852]: DEBUG oslo_concurrency.lockutils [req-2eb3185c-2670-4d32-a9d5-eb3831d51ff5 req-1b06af5f-0ea0-4d29-abc8-43fadefd13b7 service nova] Acquiring lock "8897a654-6805-45b0-b12b-16f7981d33ad-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.810982] env[61852]: DEBUG oslo_concurrency.lockutils [req-2eb3185c-2670-4d32-a9d5-eb3831d51ff5 req-1b06af5f-0ea0-4d29-abc8-43fadefd13b7 service nova] Lock "8897a654-6805-45b0-b12b-16f7981d33ad-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.811162] env[61852]: DEBUG oslo_concurrency.lockutils [req-2eb3185c-2670-4d32-a9d5-eb3831d51ff5 req-1b06af5f-0ea0-4d29-abc8-43fadefd13b7 service nova] Lock "8897a654-6805-45b0-b12b-16f7981d33ad-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.811329] env[61852]: DEBUG nova.compute.manager [req-2eb3185c-2670-4d32-a9d5-eb3831d51ff5 req-1b06af5f-0ea0-4d29-abc8-43fadefd13b7 service nova] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] No waiting events found dispatching network-vif-plugged-9a926e1c-a6f1-408c-84f3-dfb08cb0464c {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 798.811494] env[61852]: WARNING nova.compute.manager [req-2eb3185c-2670-4d32-a9d5-eb3831d51ff5 req-1b06af5f-0ea0-4d29-abc8-43fadefd13b7 service nova] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Received unexpected event network-vif-plugged-9a926e1c-a6f1-408c-84f3-dfb08cb0464c for instance with vm_state building and task_state spawning. [ 798.811653] env[61852]: DEBUG nova.compute.manager [req-2eb3185c-2670-4d32-a9d5-eb3831d51ff5 req-1b06af5f-0ea0-4d29-abc8-43fadefd13b7 service nova] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Received event network-changed-9a926e1c-a6f1-408c-84f3-dfb08cb0464c {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 798.811805] env[61852]: DEBUG nova.compute.manager [req-2eb3185c-2670-4d32-a9d5-eb3831d51ff5 req-1b06af5f-0ea0-4d29-abc8-43fadefd13b7 service nova] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Refreshing instance network info cache due to event network-changed-9a926e1c-a6f1-408c-84f3-dfb08cb0464c. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 798.812056] env[61852]: DEBUG oslo_concurrency.lockutils [req-2eb3185c-2670-4d32-a9d5-eb3831d51ff5 req-1b06af5f-0ea0-4d29-abc8-43fadefd13b7 service nova] Acquiring lock "refresh_cache-8897a654-6805-45b0-b12b-16f7981d33ad" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 798.817336] env[61852]: DEBUG oslo_vmware.api [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292802, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.964669] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9eac04e6-8549-418c-87ab-3559d84376b2 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "c94066d5-2e5f-4059-bdc5-385d517f1d84" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 108.796s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.113553] env[61852]: DEBUG oslo_concurrency.lockutils [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Releasing lock "refresh_cache-8897a654-6805-45b0-b12b-16f7981d33ad" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 799.114724] env[61852]: DEBUG nova.compute.manager [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Instance network_info: |[{"id": "9a926e1c-a6f1-408c-84f3-dfb08cb0464c", "address": "fa:16:3e:75:cf:e5", "network": {"id": "8b69c186-4a11-44d5-871d-fefb7cc45cb8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f81b609db0954f0a9e9474a2fd875f0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90328c7b-15c4-4742-805b-755248d67029", "external-id": "nsx-vlan-transportzone-860", "segmentation_id": 860, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a926e1c-a6", "ovs_interfaceid": "9a926e1c-a6f1-408c-84f3-dfb08cb0464c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 799.114724] env[61852]: DEBUG oslo_concurrency.lockutils [req-2eb3185c-2670-4d32-a9d5-eb3831d51ff5 req-1b06af5f-0ea0-4d29-abc8-43fadefd13b7 service nova] Acquired lock "refresh_cache-8897a654-6805-45b0-b12b-16f7981d33ad" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.114927] env[61852]: DEBUG nova.network.neutron [req-2eb3185c-2670-4d32-a9d5-eb3831d51ff5 
req-1b06af5f-0ea0-4d29-abc8-43fadefd13b7 service nova] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Refreshing network info cache for port 9a926e1c-a6f1-408c-84f3-dfb08cb0464c {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 799.115959] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:75:cf:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '90328c7b-15c4-4742-805b-755248d67029', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9a926e1c-a6f1-408c-84f3-dfb08cb0464c', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 799.123450] env[61852]: DEBUG oslo.service.loopingcall [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 799.124296] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 799.125130] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-21c62ff5-a476-4696-acaa-e5c39c1dd23e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.145851] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 799.145851] env[61852]: value = "task-1292803" [ 799.145851] env[61852]: _type = "Task" [ 799.145851] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.156141] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292803, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.308259] env[61852]: DEBUG oslo_vmware.api [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292802, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.313396] env[61852]: DEBUG nova.scheduler.client.report [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 799.396184] env[61852]: DEBUG nova.compute.manager [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 799.426893] env[61852]: DEBUG nova.virt.hardware [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 799.427196] env[61852]: DEBUG nova.virt.hardware [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 799.427388] env[61852]: DEBUG nova.virt.hardware [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 799.427610] env[61852]: DEBUG nova.virt.hardware [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 799.427777] env[61852]: DEBUG nova.virt.hardware [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 799.427930] env[61852]: 
DEBUG nova.virt.hardware [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 799.428154] env[61852]: DEBUG nova.virt.hardware [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 799.428314] env[61852]: DEBUG nova.virt.hardware [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 799.428483] env[61852]: DEBUG nova.virt.hardware [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 799.428643] env[61852]: DEBUG nova.virt.hardware [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 799.428813] env[61852]: DEBUG nova.virt.hardware [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 799.429689] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-377e6481-d9fa-4655-a48b-45df1c9dea1f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.437799] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcbe9fc1-bf0d-44aa-9872-9619eeb1313c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.469023] env[61852]: DEBUG nova.compute.manager [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 799.574630] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e3228e-797a-41e3-a7c2-1ce329df2c25 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.581053] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-d62330e6-b153-42c8-bb11-afe937fc8cb6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Suspending the VM {{(pid=61852) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 799.581282] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-fa6ec593-c273-4b39-b2a5-e55235927a2a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.587322] env[61852]: DEBUG oslo_vmware.api [None req-d62330e6-b153-42c8-bb11-afe937fc8cb6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 799.587322] env[61852]: value = "task-1292804" [ 799.587322] env[61852]: _type = "Task" [ 799.587322] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.594969] env[61852]: DEBUG oslo_vmware.api [None req-d62330e6-b153-42c8-bb11-afe937fc8cb6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1292804, 'name': SuspendVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.658614] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292803, 'name': CreateVM_Task, 'duration_secs': 0.370776} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.661146] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 799.661845] env[61852]: DEBUG oslo_concurrency.lockutils [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 799.662051] env[61852]: DEBUG oslo_concurrency.lockutils [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.662389] env[61852]: DEBUG oslo_concurrency.lockutils [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 799.663053] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f219b00-9b1f-4bf8-a7a0-acce3e38854c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.668630] env[61852]: DEBUG oslo_vmware.api [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){ [ 799.668630] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52849113-1944-c753-f533-4688e8a2b459" [ 799.668630] env[61852]: _type = "Task" [ 799.668630] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.679556] env[61852]: DEBUG oslo_vmware.api [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52849113-1944-c753-f533-4688e8a2b459, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.810335] env[61852]: DEBUG oslo_vmware.api [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292802, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.569293} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.810745] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] f18906e9-67b3-4537-9169-9d275e2ec4e4/f18906e9-67b3-4537-9169-9d275e2ec4e4.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 799.810843] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 799.811136] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-db9d4f52-82c8-4f31-875b-2c42fe0e58b5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.819101] env[61852]: DEBUG oslo_concurrency.lockutils [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.449s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.819458] env[61852]: DEBUG nova.compute.manager [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 799.822221] env[61852]: DEBUG oslo_vmware.api [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){ [ 799.822221] env[61852]: value = "task-1292805" [ 799.822221] env[61852]: _type = "Task" [ 799.822221] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.822673] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 11.318s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.833224] env[61852]: DEBUG oslo_vmware.api [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292805, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.885797] env[61852]: DEBUG nova.network.neutron [req-2eb3185c-2670-4d32-a9d5-eb3831d51ff5 req-1b06af5f-0ea0-4d29-abc8-43fadefd13b7 service nova] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Updated VIF entry in instance network info cache for port 9a926e1c-a6f1-408c-84f3-dfb08cb0464c. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 799.885797] env[61852]: DEBUG nova.network.neutron [req-2eb3185c-2670-4d32-a9d5-eb3831d51ff5 req-1b06af5f-0ea0-4d29-abc8-43fadefd13b7 service nova] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Updating instance_info_cache with network_info: [{"id": "9a926e1c-a6f1-408c-84f3-dfb08cb0464c", "address": "fa:16:3e:75:cf:e5", "network": {"id": "8b69c186-4a11-44d5-871d-fefb7cc45cb8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f81b609db0954f0a9e9474a2fd875f0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90328c7b-15c4-4742-805b-755248d67029", "external-id": "nsx-vlan-transportzone-860", "segmentation_id": 860, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a926e1c-a6", "ovs_interfaceid": "9a926e1c-a6f1-408c-84f3-dfb08cb0464c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.994853] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.011602] env[61852]: DEBUG nova.compute.manager [req-07a1d536-4d12-4238-b734-2cb1ad6163c8 req-9ae94b57-ec25-408f-8e0c-2403cfa2c54b service nova] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Received event network-vif-plugged-3d6f661c-c36f-4b84-b1ed-6b0388986c2d {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 800.011602] env[61852]: DEBUG oslo_concurrency.lockutils [req-07a1d536-4d12-4238-b734-2cb1ad6163c8 req-9ae94b57-ec25-408f-8e0c-2403cfa2c54b service nova] Acquiring lock "b0d38886-aacb-4b7e-9530-c5891d9cee66-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.011602] env[61852]: DEBUG oslo_concurrency.lockutils [req-07a1d536-4d12-4238-b734-2cb1ad6163c8 req-9ae94b57-ec25-408f-8e0c-2403cfa2c54b service nova] Lock "b0d38886-aacb-4b7e-9530-c5891d9cee66-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
800.011602] env[61852]: DEBUG oslo_concurrency.lockutils [req-07a1d536-4d12-4238-b734-2cb1ad6163c8 req-9ae94b57-ec25-408f-8e0c-2403cfa2c54b service nova] Lock "b0d38886-aacb-4b7e-9530-c5891d9cee66-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 800.012622] env[61852]: DEBUG nova.compute.manager [req-07a1d536-4d12-4238-b734-2cb1ad6163c8 req-9ae94b57-ec25-408f-8e0c-2403cfa2c54b service nova] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] No waiting events found dispatching network-vif-plugged-3d6f661c-c36f-4b84-b1ed-6b0388986c2d {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 800.014127] env[61852]: WARNING nova.compute.manager [req-07a1d536-4d12-4238-b734-2cb1ad6163c8 req-9ae94b57-ec25-408f-8e0c-2403cfa2c54b service nova] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Received unexpected event network-vif-plugged-3d6f661c-c36f-4b84-b1ed-6b0388986c2d for instance with vm_state building and task_state spawning. [ 800.097693] env[61852]: DEBUG oslo_vmware.api [None req-d62330e6-b153-42c8-bb11-afe937fc8cb6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1292804, 'name': SuspendVM_Task} progress is 62%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.178961] env[61852]: DEBUG oslo_vmware.api [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52849113-1944-c753-f533-4688e8a2b459, 'name': SearchDatastore_Task, 'duration_secs': 0.016459} completed successfully. 
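The CreateVM_Task, SuspendVM_Task and SearchDatastore_Task records above all share one shape: wait_for_task registers interest in a vCenter task and _poll_task re-reads its state, logging "progress is N%" until the task reports "completed successfully". A minimal sketch of that polling loop follows; TaskInfo, read_task_info and the 0.5 s interval are illustrative assumptions, not oslo.vmware's actual internals.

```python
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    state: str               # 'queued' | 'running' | 'success' | 'error'
    progress: int = 0
    result: object = None
    error: str = ''


def wait_for_task(read_task_info, poll_interval=0.5):
    """Poll a task until it reaches a terminal state, echoing the
    "progress is N%" breadcrumbs seen in the surrounding records."""
    while True:
        info = read_task_info()            # one property read per pass
        if info.state == 'success':
            return info.result
        if info.state == 'error':
            raise RuntimeError(info.error)
        print(f"progress is {info.progress}%")
        time.sleep(poll_interval)
```

For instance, wait_for_task(lambda: TaskInfo('success', 100, 'vm-123')) would return 'vm-123' on the first pass, while a task stuck in 'running' keeps emitting progress lines like the ones above.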
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.179332] env[61852]: DEBUG oslo_concurrency.lockutils [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 800.179601] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 800.179869] env[61852]: DEBUG oslo_concurrency.lockutils [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 800.180061] env[61852]: DEBUG oslo_concurrency.lockutils [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.180296] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 800.180563] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9aa52a56-9995-49ed-9ae9-cc5f6e4ee350 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.188612] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 800.188768] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 800.189462] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ecd825a-6002-4dee-b02c-75d12d70e69f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.194411] env[61852]: DEBUG oslo_vmware.api [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){ [ 800.194411] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5267c75b-8809-cffc-2ee9-199fec466db7" [ 800.194411] env[61852]: _type = "Task" [ 800.194411] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.201832] env[61852]: DEBUG oslo_vmware.api [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5267c75b-8809-cffc-2ee9-199fec466db7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.324263] env[61852]: DEBUG nova.compute.utils [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 800.325772] env[61852]: DEBUG nova.compute.manager [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 800.325981] env[61852]: DEBUG nova.network.neutron [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 800.342016] env[61852]: DEBUG oslo_vmware.api [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292805, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.147725} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.342423] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 800.343259] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c081156b-4c93-4516-afa9-5d83fa2e7534 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.366986] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Reconfiguring VM instance instance-00000039 to attach disk [datastore2] f18906e9-67b3-4537-9169-9d275e2ec4e4/f18906e9-67b3-4537-9169-9d275e2ec4e4.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 800.367581] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-217b02d6-257a-43dc-98bf-2507537e5301 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.382738] env[61852]: DEBUG nova.policy [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '47b9c159fc5547579b0e429b09d92760', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8dff8d945da948a89ee0fb2e2ddd0f9b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 800.388452] env[61852]: DEBUG oslo_concurrency.lockutils [req-2eb3185c-2670-4d32-a9d5-eb3831d51ff5 req-1b06af5f-0ea0-4d29-abc8-43fadefd13b7 service nova] Releasing lock "refresh_cache-8897a654-6805-45b0-b12b-16f7981d33ad" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 800.389984] env[61852]: DEBUG oslo_vmware.api [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){ [ 800.389984] env[61852]: value = "task-1292806" [ 800.389984] env[61852]: _type = "Task" [ 800.389984] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.398945] env[61852]: DEBUG oslo_vmware.api [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292806, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.581926] env[61852]: DEBUG nova.network.neutron [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Successfully updated port: 3d6f661c-c36f-4b84-b1ed-6b0388986c2d {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 800.601020] env[61852]: DEBUG oslo_vmware.api [None req-d62330e6-b153-42c8-bb11-afe937fc8cb6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1292804, 'name': SuspendVM_Task, 'duration_secs': 0.664005} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.601237] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-d62330e6-b153-42c8-bb11-afe937fc8cb6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Suspended the VM {{(pid=61852) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 800.601407] env[61852]: DEBUG nova.compute.manager [None req-d62330e6-b153-42c8-bb11-afe937fc8cb6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 800.602191] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b9c7af5-2e27-4ff5-880e-9e4ca464dcff {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.648345] env[61852]: DEBUG nova.compute.manager [req-865f4ae9-541c-4a4e-abae-6bb9e8033d54 req-c81b20eb-0343-448c-bf90-23865f23117b service nova] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Received event network-changed-3d6f661c-c36f-4b84-b1ed-6b0388986c2d {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 800.648345] env[61852]: DEBUG nova.compute.manager [req-865f4ae9-541c-4a4e-abae-6bb9e8033d54 req-c81b20eb-0343-448c-bf90-23865f23117b service nova] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Refreshing instance network info cache due to event network-changed-3d6f661c-c36f-4b84-b1ed-6b0388986c2d. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 800.648345] env[61852]: DEBUG oslo_concurrency.lockutils [req-865f4ae9-541c-4a4e-abae-6bb9e8033d54 req-c81b20eb-0343-448c-bf90-23865f23117b service nova] Acquiring lock "refresh_cache-b0d38886-aacb-4b7e-9530-c5891d9cee66" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 800.648345] env[61852]: DEBUG oslo_concurrency.lockutils [req-865f4ae9-541c-4a4e-abae-6bb9e8033d54 req-c81b20eb-0343-448c-bf90-23865f23117b service nova] Acquired lock "refresh_cache-b0d38886-aacb-4b7e-9530-c5891d9cee66" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.648345] env[61852]: DEBUG nova.network.neutron [req-865f4ae9-541c-4a4e-abae-6bb9e8033d54 req-c81b20eb-0343-448c-bf90-23865f23117b service nova] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Refreshing network info cache for port 3d6f661c-c36f-4b84-b1ed-6b0388986c2d {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 800.656266] env[61852]: DEBUG nova.network.neutron [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Successfully created port: 7791e47c-6084-49c7-b1c2-b28459f8f408 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 800.704995] env[61852]: DEBUG oslo_vmware.api [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5267c75b-8809-cffc-2ee9-199fec466db7, 'name': SearchDatastore_Task, 'duration_secs': 0.008968} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.705798] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-456bdd97-dfdf-4fd4-8537-a780d7bcde65 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.711274] env[61852]: DEBUG oslo_vmware.api [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){ [ 800.711274] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5208e174-4479-c546-c83e-3b9740d7cc6c" [ 800.711274] env[61852]: _type = "Task" [ 800.711274] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.720548] env[61852]: DEBUG oslo_vmware.api [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5208e174-4479-c546-c83e-3b9740d7cc6c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.828745] env[61852]: DEBUG nova.compute.manager [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Start building block device mappings for instance. 
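The network-vif-plugged and network-changed records are Neutron notifying Nova about port events. The compute manager pops a waiter registered for the (instance, event) pair; when nothing is waiting, it logs the "Received unexpected event ... for instance with vm_state building and task_state spawning" warning seen above. A toy version of that registry, with simplified names that mirror but do not reproduce nova.compute.manager:

```python
import threading
from collections import defaultdict


class InstanceEvents:
    """Registry of anticipated external events, keyed by instance and tag."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = defaultdict(dict)  # uuid -> {tag: threading.Event}

    def prepare(self, uuid, tag):
        """Called before plugging the VIF, so the event can be awaited."""
        ev = threading.Event()
        with self._lock:
            self._waiters[uuid][tag] = ev
        return ev

    def pop(self, uuid, tag):
        with self._lock:
            return self._waiters[uuid].pop(tag, None)


def external_event(events, uuid, tag):
    """Dispatch one Neutron notification to a waiter, if any."""
    waiter = events.pop(uuid, tag)
    if waiter is None:
        print(f"No waiting events found dispatching {tag} for {uuid}")
    else:
        waiter.set()  # unblocks the spawn path waiting on this plug
```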
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 800.857439] env[61852]: WARNING nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance d48cefda-0b05-4ec0-8c1d-bc25cd491faf is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 800.857624] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance d3922357-383f-4f7e-9c76-4eb688a092b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 800.857769] env[61852]: WARNING nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance cb50d964-5c0e-4cf3-b652-0f7b7a488f91 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 800.857904] env[61852]: WARNING nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 46ccab1f-b7af-49df-a38d-af1fa3bac486 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 800.858045] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance c94066d5-2e5f-4059-bdc5-385d517f1d84 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 800.858173] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance f18906e9-67b3-4537-9169-9d275e2ec4e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 800.858289] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 8897a654-6805-45b0-b12b-16f7981d33ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 800.858399] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance b0d38886-aacb-4b7e-9530-c5891d9cee66 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 800.858511] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 8d733f93-7636-447b-a5d5-53c16c30061f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 800.898932] env[61852]: DEBUG oslo_vmware.api [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292806, 'name': ReconfigVM_Task, 'duration_secs': 0.405413} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.899218] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Reconfigured VM instance instance-00000039 to attach disk [datastore2] f18906e9-67b3-4537-9169-9d275e2ec4e4/f18906e9-67b3-4537-9169-9d275e2ec4e4.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 800.899815] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e437b144-abc6-4950-80e8-65892e3541c9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.905683] env[61852]: DEBUG oslo_vmware.api [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){ [ 800.905683] env[61852]: value = "task-1292807" [ 800.905683] env[61852]: _type = "Task" [ 800.905683] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.915439] env[61852]: DEBUG oslo_vmware.api [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292807, 'name': Rename_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.081016] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Acquiring lock "refresh_cache-b0d38886-aacb-4b7e-9530-c5891d9cee66" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.179508] env[61852]: DEBUG nova.network.neutron [req-865f4ae9-541c-4a4e-abae-6bb9e8033d54 req-c81b20eb-0343-448c-bf90-23865f23117b service nova] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Instance cache missing network info. 
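Each "Acquiring lock refresh_cache-<uuid>" / "Releasing lock" pair above brackets a rebuild of one instance's network-info cache, so concurrent request threads cannot race on the same instance. A hedged sketch using oslo.concurrency's real lockutils.lock context manager, with rebuild_cache standing in for the Neutron query:

```python
from oslo_concurrency import lockutils


def refresh_instance_nw_cache(instance_uuid, rebuild_cache):
    # Serialize every refresh of this instance's network-info cache,
    # matching the Acquiring/Acquired/Releasing triples in the records.
    with lockutils.lock(f"refresh_cache-{instance_uuid}"):
        return rebuild_cache(instance_uuid)
```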
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 801.223538] env[61852]: DEBUG oslo_vmware.api [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5208e174-4479-c546-c83e-3b9740d7cc6c, 'name': SearchDatastore_Task, 'duration_secs': 0.009712} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.223838] env[61852]: DEBUG oslo_concurrency.lockutils [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 801.224070] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 8897a654-6805-45b0-b12b-16f7981d33ad/8897a654-6805-45b0-b12b-16f7981d33ad.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 801.224331] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-769836af-4bdf-4d51-a3d4-ee6c4e6b16b7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.231119] env[61852]: DEBUG oslo_vmware.api [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){ [ 801.231119] env[61852]: value = "task-1292808" [ 801.231119] env[61852]: _type = "Task" [ 801.231119] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.238631] env[61852]: DEBUG oslo_vmware.api [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292808, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.252887] env[61852]: DEBUG nova.network.neutron [req-865f4ae9-541c-4a4e-abae-6bb9e8033d54 req-c81b20eb-0343-448c-bf90-23865f23117b service nova] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.361762] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance e265a4be-7b37-40b5-a199-42a7cd945f66 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
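The resource-tracker records above sort placement allocations into three buckets: instances actively managed here are left alone, instances scheduled but "yet to start" are skipped, and unknown instances draw the "not being actively managed" warning. A toy triage of that decision, with tracked and scheduled as assumed inputs rather than the real tracker state:

```python
def triage_allocations(allocations, tracked, scheduled):
    """allocations: uuid -> resources reported by placement for this node."""
    for uuid, resources in allocations.items():
        if uuid in tracked:
            print(f"Instance {uuid} actively managed on this compute host "
                  f"and has allocations in placement: {resources}.")
        elif uuid in scheduled:
            print(f"Instance {uuid} scheduled here but not yet started; "
                  f"skipping heal of allocation: {resources}.")
        else:
            print(f"WARNING: Instance {uuid} has allocations referencing "
                  f"this host but is not actively managed by it; "
                  f"skipping heal.")
```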
{{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 801.415692] env[61852]: DEBUG oslo_vmware.api [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292807, 'name': Rename_Task, 'duration_secs': 0.13318} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.416762] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 801.416762] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4b80a82a-d26b-4fe3-a4f3-2805b8ef5439 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.424446] env[61852]: DEBUG oslo_vmware.api [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){ [ 801.424446] env[61852]: value = "task-1292809" [ 801.424446] env[61852]: _type = "Task" [ 801.424446] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.436119] env[61852]: DEBUG oslo_vmware.api [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292809, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.742457] env[61852]: DEBUG oslo_vmware.api [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292808, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.499138} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.742768] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 8897a654-6805-45b0-b12b-16f7981d33ad/8897a654-6805-45b0-b12b-16f7981d33ad.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 801.743050] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 801.743390] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f12cada1-595b-4cc8-bff6-5a319e5ddac7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.750576] env[61852]: DEBUG oslo_vmware.api [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){ [ 801.750576] env[61852]: value = "task-1292810" [ 801.750576] env[61852]: _type = "Task" [ 801.750576] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.755701] env[61852]: DEBUG oslo_concurrency.lockutils [req-865f4ae9-541c-4a4e-abae-6bb9e8033d54 req-c81b20eb-0343-448c-bf90-23865f23117b service nova] Releasing lock "refresh_cache-b0d38886-aacb-4b7e-9530-c5891d9cee66" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 801.759019] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Acquired lock "refresh_cache-b0d38886-aacb-4b7e-9530-c5891d9cee66" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.759279] env[61852]: DEBUG nova.network.neutron [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 801.760545] env[61852]: DEBUG oslo_vmware.api [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292810, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.838303] env[61852]: DEBUG nova.compute.manager [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Start spawning the instance on the hypervisor. 
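These disk records trace the copy-on-boot path: lock the cached base VMDK under devstack-image-cache_base, confirm it exists (SearchDatastore_Task), copy it into the instance directory (CopyVirtualDisk_Task), and finally extend the root disk to the flavor size, 1048576 KB for these 1 GB m1.nano roots (ExtendVirtualDisk_Task). A sketch under assumed helper names; disk_exists, copy_disk and extend_disk are placeholders, not Nova's signatures:

```python
from oslo_concurrency import lockutils


def prepare_root_disk(datastore, image_id, instance_uuid, root_gb,
                      disk_exists, copy_disk, extend_disk):
    base = (f"[{datastore}] devstack-image-cache_base/"
            f"{image_id}/{image_id}.vmdk")
    dest = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"
    with lockutils.lock(base):        # one fetcher per cached image at a time
        if not disk_exists(base):     # the SearchDatastore_Task step
            raise FileNotFoundError(base)
        copy_disk(base, dest)         # the CopyVirtualDisk_Task step
    # root_gb=1 -> 1048576 KB, the figure in the "Extending root" records
    extend_disk(dest, root_gb * 1024 * 1024)
```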
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 801.859839] env[61852]: DEBUG nova.virt.hardware [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 801.859839] env[61852]: DEBUG nova.virt.hardware [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 801.859839] env[61852]: DEBUG nova.virt.hardware [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 801.859839] env[61852]: DEBUG nova.virt.hardware [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 801.860053] env[61852]: DEBUG nova.virt.hardware [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 801.860053] env[61852]: DEBUG nova.virt.hardware [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 801.860115] env[61852]: DEBUG nova.virt.hardware [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 801.861055] env[61852]: DEBUG nova.virt.hardware [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 801.861055] env[61852]: DEBUG nova.virt.hardware [None req-42750323-5972-46e9-8a78-719bafe7058a 
tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 801.861055] env[61852]: DEBUG nova.virt.hardware [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 801.861055] env[61852]: DEBUG nova.virt.hardware [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 801.861804] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a1c76ec-e38b-4dc5-984c-58b188c46f55 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.865069] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 0ec1210f-7d42-4b71-abdc-9f818ffb91ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 801.875380] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3929cb12-27a8-4ef6-a637-c815ebbe34f5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.933983] env[61852]: DEBUG oslo_vmware.api [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292809, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.076271] env[61852]: DEBUG nova.compute.manager [req-417dabd0-58ad-4a69-bd96-f9521312cada req-decb4a36-7fd6-4fa1-ba51-f9856bb3bfa7 service nova] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Received event network-vif-plugged-7791e47c-6084-49c7-b1c2-b28459f8f408 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 802.076564] env[61852]: DEBUG oslo_concurrency.lockutils [req-417dabd0-58ad-4a69-bd96-f9521312cada req-decb4a36-7fd6-4fa1-ba51-f9856bb3bfa7 service nova] Acquiring lock "8d733f93-7636-447b-a5d5-53c16c30061f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 802.076839] env[61852]: DEBUG oslo_concurrency.lockutils [req-417dabd0-58ad-4a69-bd96-f9521312cada req-decb4a36-7fd6-4fa1-ba51-f9856bb3bfa7 service nova] Lock "8d733f93-7636-447b-a5d5-53c16c30061f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.077090] env[61852]: DEBUG oslo_concurrency.lockutils [req-417dabd0-58ad-4a69-bd96-f9521312cada req-decb4a36-7fd6-4fa1-ba51-f9856bb3bfa7 service nova] Lock "8d733f93-7636-447b-a5d5-53c16c30061f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 802.077499] env[61852]: DEBUG nova.compute.manager [req-417dabd0-58ad-4a69-bd96-f9521312cada req-decb4a36-7fd6-4fa1-ba51-f9856bb3bfa7 service nova] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] No waiting events found dispatching network-vif-plugged-7791e47c-6084-49c7-b1c2-b28459f8f408 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 802.077499] env[61852]: WARNING nova.compute.manager [req-417dabd0-58ad-4a69-bd96-f9521312cada req-decb4a36-7fd6-4fa1-ba51-f9856bb3bfa7 service nova] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Received unexpected event network-vif-plugged-7791e47c-6084-49c7-b1c2-b28459f8f408 for instance with vm_state building and task_state spawning. [ 802.172079] env[61852]: DEBUG nova.network.neutron [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Successfully updated port: 7791e47c-6084-49c7-b1c2-b28459f8f408 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 802.260196] env[61852]: DEBUG oslo_vmware.api [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292810, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063689} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.260477] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 802.261287] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93231cce-f09c-4f3b-a2b0-41ba3f2038c4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.285062] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] 8897a654-6805-45b0-b12b-16f7981d33ad/8897a654-6805-45b0-b12b-16f7981d33ad.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 802.285311] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-77736250-6af8-48cf-9d2d-841f170061c5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.299619] env[61852]: DEBUG nova.network.neutron [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 802.306070] env[61852]: DEBUG oslo_vmware.api [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){ [ 802.306070] env[61852]: value = "task-1292811" [ 802.306070] env[61852]: _type = "Task" [ 802.306070] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.314451] env[61852]: DEBUG oslo_vmware.api [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292811, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.373300] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 89970cff-cb49-4803-81a5-1675b0ea4aaf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 802.435570] env[61852]: DEBUG oslo_vmware.api [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292809, 'name': PowerOnVM_Task, 'duration_secs': 0.863883} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.435878] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 802.436134] env[61852]: INFO nova.compute.manager [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Took 7.95 seconds to spawn the instance on the hypervisor. [ 802.436355] env[61852]: DEBUG nova.compute.manager [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 802.437363] env[61852]: DEBUG nova.network.neutron [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Updating instance_info_cache with network_info: [{"id": "3d6f661c-c36f-4b84-b1ed-6b0388986c2d", "address": "fa:16:3e:72:65:f1", "network": {"id": "b6ddf5d7-3ac3-459c-8934-a3696cefc9a1", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1501197891-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfc80b8d433b4f6386a3c9a133f2164a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c964bdc6-fccc-40d9-bfe2-763b6f05a863", "external-id": "cl2-zone-376", "segmentation_id": 376, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d6f661c-c3", "ovs_interfaceid": "3d6f661c-c36f-4b84-b1ed-6b0388986c2d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.439036] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c869d19d-e160-4488-8bee-faf8cfe5e3c2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.676078] env[61852]: DEBUG oslo_concurrency.lockutils [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquiring lock "refresh_cache-8d733f93-7636-447b-a5d5-53c16c30061f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 802.676078] env[61852]: DEBUG oslo_concurrency.lockutils [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquired lock 
"refresh_cache-8d733f93-7636-447b-a5d5-53c16c30061f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.676078] env[61852]: DEBUG nova.network.neutron [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 802.817047] env[61852]: DEBUG oslo_vmware.api [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292811, 'name': ReconfigVM_Task, 'duration_secs': 0.271409} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.817202] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Reconfigured VM instance instance-0000003a to attach disk [datastore1] 8897a654-6805-45b0-b12b-16f7981d33ad/8897a654-6805-45b0-b12b-16f7981d33ad.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 802.817773] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-75fae6b0-6447-49a4-8401-057e6e55fb29 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.823462] env[61852]: DEBUG oslo_vmware.api [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){ [ 802.823462] env[61852]: value = "task-1292812" [ 802.823462] env[61852]: _type = "Task" [ 802.823462] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.830958] env[61852]: DEBUG oslo_vmware.api [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292812, 'name': Rename_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.876967] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance f8ebb1b7-39c6-486e-ab25-23080d858846 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 802.886081] env[61852]: DEBUG nova.compute.manager [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 802.887020] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e82eda61-e25c-4d79-aa96-968e91fbd6a3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.943235] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Releasing lock "refresh_cache-b0d38886-aacb-4b7e-9530-c5891d9cee66" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 802.943636] env[61852]: DEBUG nova.compute.manager [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Instance network_info: |[{"id": "3d6f661c-c36f-4b84-b1ed-6b0388986c2d", "address": "fa:16:3e:72:65:f1", "network": {"id": "b6ddf5d7-3ac3-459c-8934-a3696cefc9a1", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1501197891-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfc80b8d433b4f6386a3c9a133f2164a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c964bdc6-fccc-40d9-bfe2-763b6f05a863", "external-id": "cl2-zone-376", "segmentation_id": 376, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d6f661c-c3", "ovs_interfaceid": "3d6f661c-c36f-4b84-b1ed-6b0388986c2d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 802.944417] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:72:65:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c964bdc6-fccc-40d9-bfe2-763b6f05a863', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3d6f661c-c36f-4b84-b1ed-6b0388986c2d', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 802.952093] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Creating folder: Project (bfc80b8d433b4f6386a3c9a133f2164a). 
Parent ref: group-v277280. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 802.956345] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ecc28144-350f-4417-bcaf-889415297b1f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.961707] env[61852]: INFO nova.compute.manager [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Took 31.94 seconds to build instance. [ 802.967841] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Created folder: Project (bfc80b8d433b4f6386a3c9a133f2164a) in parent group-v277280. [ 802.967841] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Creating folder: Instances. Parent ref: group-v277320. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 802.968021] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-45d675c2-60ea-4933-9ce8-2a975e2dcb3c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.977677] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Created folder: Instances in parent group-v277320. [ 802.978137] env[61852]: DEBUG oslo.service.loopingcall [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 802.978241] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 802.978503] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-78e89e5d-20b3-45b6-8282-7defc3958650 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.998785] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 802.998785] env[61852]: value = "task-1292815" [ 802.998785] env[61852]: _type = "Task" [ 802.998785] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.006778] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292815, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.209119] env[61852]: DEBUG nova.network.neutron [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 803.334221] env[61852]: DEBUG oslo_vmware.api [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292812, 'name': Rename_Task, 'duration_secs': 0.156299} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.334668] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 803.334982] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5503cd1c-3709-4d9d-bbfd-74f9bc217288 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.338257] env[61852]: DEBUG nova.network.neutron [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Updating instance_info_cache with network_info: [{"id": "7791e47c-6084-49c7-b1c2-b28459f8f408", "address": "fa:16:3e:5f:7a:02", "network": {"id": "f986fa1f-0449-45a5-86ee-66a7fe44ea49", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-901829409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8dff8d945da948a89ee0fb2e2ddd0f9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7791e47c-60", "ovs_interfaceid": "7791e47c-6084-49c7-b1c2-b28459f8f408", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.341570] env[61852]: DEBUG oslo_vmware.api [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){ [ 803.341570] env[61852]: value = "task-1292816" [ 803.341570] env[61852]: _type = "Task" [ 803.341570] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.350444] env[61852]: DEBUG oslo_vmware.api [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292816, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.380420] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.399073] env[61852]: INFO nova.compute.manager [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] instance snapshotting [ 803.399311] env[61852]: WARNING nova.compute.manager [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 803.403359] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6389baaf-7818-40ee-9cdc-a3e85b9bb1a7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.422505] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eabab83-2314-4654-a1e2-348a5a4cde1c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.463262] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3b8e9146-fcfe-4f19-8ea8-ba92ba3be95d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Lock "f18906e9-67b3-4537-9169-9d275e2ec4e4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 111.417s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.508981] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292815, 'name': CreateVM_Task, 'duration_secs': 0.417451} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.509212] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 803.509866] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.510045] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.510370] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 803.510657] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8466813a-a7aa-4f5a-9ec6-7bcc4bba6153 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.515111] env[61852]: DEBUG oslo_vmware.api [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Waiting for the task: (returnval){ [ 803.515111] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5225cd91-43ac-2a03-3f8a-4967729d58a0" [ 803.515111] env[61852]: _type = "Task" [ 803.515111] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.522678] env[61852]: DEBUG oslo_vmware.api [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5225cd91-43ac-2a03-3f8a-4967729d58a0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.840594] env[61852]: DEBUG oslo_concurrency.lockutils [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Releasing lock "refresh_cache-8d733f93-7636-447b-a5d5-53c16c30061f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 803.841059] env[61852]: DEBUG nova.compute.manager [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Instance network_info: |[{"id": "7791e47c-6084-49c7-b1c2-b28459f8f408", "address": "fa:16:3e:5f:7a:02", "network": {"id": "f986fa1f-0449-45a5-86ee-66a7fe44ea49", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-901829409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8dff8d945da948a89ee0fb2e2ddd0f9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7791e47c-60", "ovs_interfaceid": "7791e47c-6084-49c7-b1c2-b28459f8f408", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 803.841537] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5f:7a:02', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '779b8e65-8b9e-427e-af08-910febd65bfa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7791e47c-6084-49c7-b1c2-b28459f8f408', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 803.848934] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Creating folder: Project (8dff8d945da948a89ee0fb2e2ddd0f9b). Parent ref: group-v277280. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 803.849326] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b49fe5d4-aa61-4c68-8fdc-d27af2f9e074 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.859529] env[61852]: DEBUG oslo_vmware.api [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292816, 'name': PowerOnVM_Task, 'duration_secs': 0.456025} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.859804] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 803.860053] env[61852]: INFO nova.compute.manager [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Took 6.86 seconds to spawn the instance on the hypervisor. [ 803.860280] env[61852]: DEBUG nova.compute.manager [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 803.861049] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd33247-4c42-409b-a22a-fba1071968b7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.864295] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Created folder: Project (8dff8d945da948a89ee0fb2e2ddd0f9b) in parent group-v277280. [ 803.864509] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Creating folder: Instances. Parent ref: group-v277323. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 803.865058] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-470ab20f-6f3a-4a96-8dc1-a57c1da48238 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.873606] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Created folder: Instances in parent group-v277323. [ 803.873832] env[61852]: DEBUG oslo.service.loopingcall [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 803.874267] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 803.874463] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5f46c065-24b4-4144-a9de-5e4aab62ed68 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.889842] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance b0f8f7dd-e559-43be-b541-c3da48a07d68 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.897196] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 803.897196] env[61852]: value = "task-1292819" [ 803.897196] env[61852]: _type = "Task" [ 803.897196] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.906388] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292819, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.934689] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Creating Snapshot of the VM instance {{(pid=61852) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 803.934941] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0134a2c1-4536-452e-92cb-3497a11bb82f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.942102] env[61852]: DEBUG oslo_vmware.api [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 803.942102] env[61852]: value = "task-1292820" [ 803.942102] env[61852]: _type = "Task" [ 803.942102] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.950759] env[61852]: DEBUG oslo_vmware.api [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1292820, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.965475] env[61852]: DEBUG nova.compute.manager [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 804.026164] env[61852]: DEBUG oslo_vmware.api [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5225cd91-43ac-2a03-3f8a-4967729d58a0, 'name': SearchDatastore_Task, 'duration_secs': 0.008475} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.029024] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.029024] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 804.029024] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.029024] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.029497] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 804.029497] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-873df38b-e9ce-41f3-a065-4ba45dface9c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.036506] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 804.036726] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 804.037469] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fcc1a78-451a-495e-be47-35c74604980a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.042775] env[61852]: DEBUG oslo_vmware.api [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Waiting for the task: (returnval){ [ 804.042775] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529d041b-a221-83dc-59d6-9c6374289f34" [ 804.042775] env[61852]: _type = "Task" [ 804.042775] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.051868] env[61852]: DEBUG oslo_vmware.api [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529d041b-a221-83dc-59d6-9c6374289f34, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.102461] env[61852]: DEBUG nova.compute.manager [req-b06cf847-7864-4215-8d6c-dac957dd1866 req-3670daa5-b1a1-435e-bdef-e3031f5227e6 service nova] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Received event network-changed-7791e47c-6084-49c7-b1c2-b28459f8f408 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 804.102714] env[61852]: DEBUG nova.compute.manager [req-b06cf847-7864-4215-8d6c-dac957dd1866 req-3670daa5-b1a1-435e-bdef-e3031f5227e6 service nova] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Refreshing instance network info cache due to event network-changed-7791e47c-6084-49c7-b1c2-b28459f8f408. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 804.103016] env[61852]: DEBUG oslo_concurrency.lockutils [req-b06cf847-7864-4215-8d6c-dac957dd1866 req-3670daa5-b1a1-435e-bdef-e3031f5227e6 service nova] Acquiring lock "refresh_cache-8d733f93-7636-447b-a5d5-53c16c30061f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.103128] env[61852]: DEBUG oslo_concurrency.lockutils [req-b06cf847-7864-4215-8d6c-dac957dd1866 req-3670daa5-b1a1-435e-bdef-e3031f5227e6 service nova] Acquired lock "refresh_cache-8d733f93-7636-447b-a5d5-53c16c30061f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.103319] env[61852]: DEBUG nova.network.neutron [req-b06cf847-7864-4215-8d6c-dac957dd1866 req-3670daa5-b1a1-435e-bdef-e3031f5227e6 service nova] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Refreshing network info cache for port 7791e47c-6084-49c7-b1c2-b28459f8f408 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 804.383891] env[61852]: INFO nova.compute.manager [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Took 28.85 seconds to build instance. 
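The records above and below are dominated by oslo_vmware's task-polling cycle: Nova invokes a vCenter task (CreateVM_Task, ReconfigVM_Task, PowerOnVM_Task, CreateSnapshot_Task, ...), then wait_for_task (oslo_vmware/api.py:397) repeatedly polls it, logging "progress is N%" via _poll_task (api.py:434) until the completion record with its duration_secs appears (api.py:444). What follows is a minimal, self-contained Python sketch of that poll loop, not the oslo.vmware implementation; FakeTask, its poll() method, and the interval argument are hypothetical stand-ins for the real vCenter task handle and the PropertyCollector query behind it.

    # Illustrative sketch only -- mimics the wait_for_task/_poll_task
    # pattern visible in this log, not the oslo.vmware API itself.
    import itertools
    import time

    class FakeTask:
        """Hypothetical stand-in for a vCenter task handle (e.g. task-1292815)."""

        def __init__(self, task_id, name, steps):
            self.task_id = task_id
            self.name = name
            # Yield the scripted progress values, then 100 forever.
            self._progress = itertools.chain(steps, itertools.repeat(100))

        def poll(self):
            # Return (state, progress); flips to 'success' at 100%.
            progress = next(self._progress)
            return ("success" if progress >= 100 else "running"), progress

    def wait_for_task(task, interval=0.5):
        # Poll `task` until it completes, echoing the log's progress lines.
        start = time.monotonic()
        while True:
            state, progress = task.poll()
            print("Task: {'id': %r, 'name': %r} progress is %d%%."
                  % (task.task_id, task.name, progress))
            if state == "success":
                print("Task %r completed successfully (duration_secs: %.6f)."
                      % (task.task_id, time.monotonic() - start))
                return
            time.sleep(interval)

    wait_for_task(FakeTask("task-1292815", "CreateVM_Task", [0, 33, 66]))

In the real service the polling appears to be driven through oslo.service looping calls (the oslo_service/loopingcall.py frames visible in this log) rather than a bare sleep loop, and failures surface as exceptions from the API session rather than a returned state.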
[ 804.392712] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 804.408047] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292819, 'name': CreateVM_Task, 'duration_secs': 0.30648} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.408190] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 804.408948] env[61852]: DEBUG oslo_concurrency.lockutils [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.409158] env[61852]: DEBUG oslo_concurrency.lockutils [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.409537] env[61852]: DEBUG oslo_concurrency.lockutils [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 804.409816] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07de8030-8b62-43f6-aea0-b9f8147c16c6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.415110] env[61852]: DEBUG oslo_vmware.api [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 804.415110] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d8eb97-172c-a45e-4b40-9f9847462b11" [ 804.415110] env[61852]: _type = "Task" [ 804.415110] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.423660] env[61852]: DEBUG oslo_vmware.api [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d8eb97-172c-a45e-4b40-9f9847462b11, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.451722] env[61852]: DEBUG oslo_vmware.api [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1292820, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.487923] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.555098] env[61852]: DEBUG oslo_vmware.api [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529d041b-a221-83dc-59d6-9c6374289f34, 'name': SearchDatastore_Task, 'duration_secs': 0.008516} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.555980] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-288f37a5-1361-4c0f-bd0e-bb5920ac835f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.561643] env[61852]: DEBUG oslo_vmware.api [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Waiting for the task: (returnval){ [ 804.561643] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5233216f-fba9-379f-0257-b577b9d05fb4" [ 804.561643] env[61852]: _type = "Task" [ 804.561643] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.569142] env[61852]: DEBUG oslo_vmware.api [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5233216f-fba9-379f-0257-b577b9d05fb4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.800411] env[61852]: DEBUG nova.network.neutron [req-b06cf847-7864-4215-8d6c-dac957dd1866 req-3670daa5-b1a1-435e-bdef-e3031f5227e6 service nova] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Updated VIF entry in instance network info cache for port 7791e47c-6084-49c7-b1c2-b28459f8f408. 
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 804.800807] env[61852]: DEBUG nova.network.neutron [req-b06cf847-7864-4215-8d6c-dac957dd1866 req-3670daa5-b1a1-435e-bdef-e3031f5227e6 service nova] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Updating instance_info_cache with network_info: [{"id": "7791e47c-6084-49c7-b1c2-b28459f8f408", "address": "fa:16:3e:5f:7a:02", "network": {"id": "f986fa1f-0449-45a5-86ee-66a7fe44ea49", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-901829409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8dff8d945da948a89ee0fb2e2ddd0f9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7791e47c-60", "ovs_interfaceid": "7791e47c-6084-49c7-b1c2-b28459f8f408", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.886259] env[61852]: DEBUG oslo_concurrency.lockutils [None req-14fb9cdc-fa1f-4a81-98e2-0542d91b67a1 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Lock "8897a654-6805-45b0-b12b-16f7981d33ad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 112.593s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.895813] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance d93b8055-1eb2-4368-a051-289dc5a9d0ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 804.925555] env[61852]: DEBUG oslo_vmware.api [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d8eb97-172c-a45e-4b40-9f9847462b11, 'name': SearchDatastore_Task, 'duration_secs': 0.010175} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.925790] env[61852]: DEBUG oslo_concurrency.lockutils [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.926039] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 804.926265] env[61852]: DEBUG oslo_concurrency.lockutils [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.953866] env[61852]: DEBUG oslo_vmware.api [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1292820, 'name': CreateSnapshot_Task, 'duration_secs': 0.633272} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.953866] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Created Snapshot of the VM instance {{(pid=61852) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 804.954303] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3159bf4e-8a73-42a5-a138-4a75f8b57ed3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.072400] env[61852]: DEBUG oslo_vmware.api [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5233216f-fba9-379f-0257-b577b9d05fb4, 'name': SearchDatastore_Task, 'duration_secs': 0.009881} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.072709] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.073055] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] b0d38886-aacb-4b7e-9530-c5891d9cee66/b0d38886-aacb-4b7e-9530-c5891d9cee66.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 805.073357] env[61852]: DEBUG oslo_concurrency.lockutils [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.073545] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 805.073939] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6c04fdb6-b999-4618-9042-f4fecb3e097e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.075721] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0f129a7f-48ee-4472-b8bb-36edc7daa241 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.083059] env[61852]: DEBUG oslo_vmware.api [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Waiting for the task: (returnval){ [ 805.083059] env[61852]: value = "task-1292821" [ 805.083059] env[61852]: _type = "Task" [ 805.083059] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.085931] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 805.086118] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 805.087106] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a58f694-f3c3-419b-81ef-4b512d101944 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.092284] env[61852]: DEBUG oslo_vmware.api [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Task: {'id': task-1292821, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.095164] env[61852]: DEBUG oslo_vmware.api [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 805.095164] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52c5e8e2-8e8d-f022-3401-91fe477719eb" [ 805.095164] env[61852]: _type = "Task" [ 805.095164] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.102297] env[61852]: DEBUG oslo_vmware.api [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52c5e8e2-8e8d-f022-3401-91fe477719eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.306604] env[61852]: DEBUG oslo_concurrency.lockutils [req-b06cf847-7864-4215-8d6c-dac957dd1866 req-3670daa5-b1a1-435e-bdef-e3031f5227e6 service nova] Releasing lock "refresh_cache-8d733f93-7636-447b-a5d5-53c16c30061f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.388979] env[61852]: DEBUG nova.compute.manager [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 805.398611] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 988c0a5c-b84d-44cf-9068-defd7132b0c9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 805.473026] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Creating linked-clone VM from snapshot {{(pid=61852) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 805.473417] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ad2b7653-b1ed-438a-982b-34913540f98e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.482839] env[61852]: DEBUG oslo_vmware.api [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 805.482839] env[61852]: value = "task-1292822" [ 805.482839] env[61852]: _type = "Task" [ 805.482839] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.490717] env[61852]: DEBUG oslo_vmware.api [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1292822, 'name': CloneVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.592624] env[61852]: DEBUG oslo_vmware.api [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Task: {'id': task-1292821, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.604351] env[61852]: DEBUG oslo_vmware.api [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52c5e8e2-8e8d-f022-3401-91fe477719eb, 'name': SearchDatastore_Task, 'duration_secs': 0.012169} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.605139] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36b46696-bc68-4e1f-affa-7ebc07c3a55b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.610410] env[61852]: DEBUG oslo_vmware.api [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 805.610410] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52ff334c-c1bf-ee4f-01a4-848794d5ddc7" [ 805.610410] env[61852]: _type = "Task" [ 805.610410] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.617920] env[61852]: DEBUG oslo_vmware.api [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52ff334c-c1bf-ee4f-01a4-848794d5ddc7, 'name': SearchDatastore_Task} progress is 0%. 
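"Creating linked-clone VM from snapshot" is a CloneVM_Task whose relocate spec asks for child disk backings, so the clone used for the image upload shares the parent's base disk and only gets a delta. A hedged sketch using the suds client factory; the spec field names are the vSphere API's, the helper itself is illustrative:

```python
def create_linked_clone(session, vm_ref, snapshot_ref, folder_ref, name):
    cf = session.vim.client.factory
    rel_spec = cf.create('ns0:VirtualMachineRelocateSpec')
    # Child disk backing = linked clone: the new VM's disks are deltas on
    # top of the snapshot taken just before (CreateSnapshot_Task above).
    rel_spec.diskMoveType = 'createNewChildDiskBacking'
    clone_spec = cf.create('ns0:VirtualMachineCloneSpec')
    clone_spec.location = rel_spec
    clone_spec.powerOn = False
    clone_spec.snapshot = snapshot_ref
    task = session.invoke_api(session.vim, 'CloneVM_Task', vm_ref,
                              folder=folder_ref, name=name, spec=clone_spec)
    return session.wait_for_task(task)  # the 'CloneVM_Task ... 94%' polls
```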
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.811668] env[61852]: DEBUG nova.compute.manager [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 805.813044] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3703081-e63d-4a3d-8027-b62595dae742 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.901385] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance f48b40ab-23f2-4071-8168-e7e2411ad64d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 805.913049] env[61852]: DEBUG oslo_concurrency.lockutils [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 805.992595] env[61852]: DEBUG oslo_vmware.api [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1292822, 'name': CloneVM_Task} progress is 94%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.092378] env[61852]: DEBUG oslo_vmware.api [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Task: {'id': task-1292821, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.947354} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.092658] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] b0d38886-aacb-4b7e-9530-c5891d9cee66/b0d38886-aacb-4b7e-9530-c5891d9cee66.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 806.092868] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 806.093172] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4a9a1630-d4cf-4e6a-be3d-cd04b541624c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.098990] env[61852]: DEBUG oslo_vmware.api [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Waiting for the task: (returnval){ [ 806.098990] env[61852]: value = "task-1292823" [ 806.098990] env[61852]: _type = "Task" [ 806.098990] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.107423] env[61852]: DEBUG oslo_vmware.api [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Task: {'id': task-1292823, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.119248] env[61852]: DEBUG oslo_vmware.api [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52ff334c-c1bf-ee4f-01a4-848794d5ddc7, 'name': SearchDatastore_Task, 'duration_secs': 0.01782} completed successfully. 
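"Extending root virtual disk to 1048576" is in KB: 1024 × 1024 KB = 1 GiB, which lines up with the 'DISK_GB': 1 allocations the resource tracker reports for these tempest flavors. The cached base image is smaller than the flavor's root disk, so each spawn copies the cached VMDK and then extends it up to the flavor size.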
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.119500] env[61852]: DEBUG oslo_concurrency.lockutils [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 806.119758] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 8d733f93-7636-447b-a5d5-53c16c30061f/8d733f93-7636-447b-a5d5-53c16c30061f.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 806.120011] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ca65ba42-fc41-49b3-9ec3-dd725177e113 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.126451] env[61852]: DEBUG oslo_vmware.api [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 806.126451] env[61852]: value = "task-1292824" [ 806.126451] env[61852]: _type = "Task" [ 806.126451] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.133720] env[61852]: DEBUG oslo_vmware.api [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1292824, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.327888] env[61852]: INFO nova.compute.manager [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] instance snapshotting [ 806.330836] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0535be51-f336-4c96-b13e-3e891c85f448 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.349222] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0687cbaa-5ca3-4fac-9ae8-61ea793d5370 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.404405] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance aeaa2828-6d83-4b26-bd1c-5f654c70713f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 806.494296] env[61852]: DEBUG oslo_vmware.api [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1292822, 'name': CloneVM_Task} progress is 94%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.609429] env[61852]: DEBUG oslo_vmware.api [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Task: {'id': task-1292823, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062874} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.609667] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 806.611266] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d284118a-7998-4b4a-90d9-cb1f13bd0e4e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.634278] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] b0d38886-aacb-4b7e-9530-c5891d9cee66/b0d38886-aacb-4b7e-9530-c5891d9cee66.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 806.634700] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b06a95ce-464f-4937-bab8-bbd502b845cf {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.662928] env[61852]: DEBUG oslo_vmware.api [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1292824, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.664669] env[61852]: DEBUG oslo_vmware.api [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Waiting for the task: (returnval){ [ 806.664669] env[61852]: value = "task-1292825" [ 806.664669] env[61852]: _type = "Task" [ 806.664669] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.673606] env[61852]: DEBUG oslo_vmware.api [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Task: {'id': task-1292825, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.860046] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Creating Snapshot of the VM instance {{(pid=61852) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 806.860770] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-364ff961-687a-482e-a986-b3ad55981d98 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.868165] env[61852]: DEBUG oslo_vmware.api [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){ [ 806.868165] env[61852]: value = "task-1292826" [ 806.868165] env[61852]: _type = "Task" [ 806.868165] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.877053] env[61852]: DEBUG oslo_vmware.api [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292826, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.908341] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 21d74604-6a64-44ee-a012-ebff7166853e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 806.993945] env[61852]: DEBUG oslo_vmware.api [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1292822, 'name': CloneVM_Task, 'duration_secs': 1.373116} completed successfully. 
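The snapshot half of image capture is a plain CreateSnapshot_Task on the instance VM; the new snapshot's moref comes back as the completed task's result and is what the subsequent linked clone is based on. A sketch against the same session API (the name/description/quiesce values are illustrative, not Nova's exact arguments):

```python
def snapshot_vm(session, vm_ref):
    # CreateSnapshot_Task(name, description, memory, quiesce); produces the
    # 'Creating Snapshot of the VM instance' / 'Created Snapshot' pair.
    task = session.invoke_api(session.vim, 'CreateSnapshot_Task', vm_ref,
                              name='nova-snapshot', description='',
                              memory=False, quiesce=False)
    task_info = session.wait_for_task(task)
    return task_info.result  # moref of the new VirtualMachineSnapshot
```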
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.995031] env[61852]: INFO nova.virt.vmwareapi.vmops [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Created linked-clone VM from snapshot [ 806.995031] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ae51032-02cb-4a95-80b3-7c5b4dd2832d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.002540] env[61852]: DEBUG nova.virt.vmwareapi.images [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Uploading image fb0a83e2-a324-456f-8eb2-051907376bce {{(pid=61852) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 807.023460] env[61852]: DEBUG oslo_vmware.rw_handles [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 807.023460] env[61852]: value = "vm-277327" [ 807.023460] env[61852]: _type = "VirtualMachine" [ 807.023460] env[61852]: }. {{(pid=61852) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 807.023875] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-755bd2fa-55ad-4ba2-a042-a4e9c6469cfb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.030682] env[61852]: DEBUG oslo_vmware.rw_handles [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lease: (returnval){ [ 807.030682] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5233ad17-24e3-5a30-8153-16773ba98a59" [ 807.030682] env[61852]: _type = "HttpNfcLease" [ 807.030682] env[61852]: } obtained for exporting VM: (result){ [ 807.030682] env[61852]: value = "vm-277327" [ 807.030682] env[61852]: _type = "VirtualMachine" [ 807.030682] env[61852]: }. {{(pid=61852) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 807.030969] env[61852]: DEBUG oslo_vmware.api [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the lease: (returnval){ [ 807.030969] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5233ad17-24e3-5a30-8153-16773ba98a59" [ 807.030969] env[61852]: _type = "HttpNfcLease" [ 807.030969] env[61852]: } to be ready. {{(pid=61852) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 807.037459] env[61852]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 807.037459] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5233ad17-24e3-5a30-8153-16773ba98a59" [ 807.037459] env[61852]: _type = "HttpNfcLease" [ 807.037459] env[61852]: } is initializing. 
{{(pid=61852) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 807.144392] env[61852]: DEBUG oslo_vmware.api [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1292824, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.657724} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.144673] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 8d733f93-7636-447b-a5d5-53c16c30061f/8d733f93-7636-447b-a5d5-53c16c30061f.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 807.144885] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 807.145151] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b8f30e75-53c2-4260-a97a-ecee3b72b0c9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.152154] env[61852]: DEBUG oslo_vmware.api [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 807.152154] env[61852]: value = "task-1292828" [ 807.152154] env[61852]: _type = "Task" [ 807.152154] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.159612] env[61852]: DEBUG oslo_vmware.api [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1292828, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.173579] env[61852]: DEBUG oslo_vmware.api [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Task: {'id': task-1292825, 'name': ReconfigVM_Task, 'duration_secs': 0.318894} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.173839] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Reconfigured VM instance instance-0000003b to attach disk [datastore1] b0d38886-aacb-4b7e-9530-c5891d9cee66/b0d38886-aacb-4b7e-9530-c5891d9cee66.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 807.174458] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ca037a55-b135-4181-bc44-7e711edeae72 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.180196] env[61852]: DEBUG oslo_vmware.api [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Waiting for the task: (returnval){ [ 807.180196] env[61852]: value = "task-1292829" [ 807.180196] env[61852]: _type = "Task" [ 807.180196] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.188264] env[61852]: DEBUG oslo_vmware.api [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Task: {'id': task-1292829, 'name': Rename_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.377691] env[61852]: DEBUG oslo_vmware.api [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292826, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.411684] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 8d8679db-eb9d-45c1-b053-70378f58e273 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 807.412047] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=61852) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 807.412213] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=61852) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 807.539945] env[61852]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 807.539945] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5233ad17-24e3-5a30-8153-16773ba98a59" [ 807.539945] env[61852]: _type = "HttpNfcLease" [ 807.539945] env[61852]: } is ready. 
{{(pid=61852) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 807.540633] env[61852]: DEBUG oslo_vmware.rw_handles [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 807.540633] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5233ad17-24e3-5a30-8153-16773ba98a59" [ 807.540633] env[61852]: _type = "HttpNfcLease" [ 807.540633] env[61852]: }. {{(pid=61852) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 807.541023] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a6b3c1-2fe6-4feb-bc70-93936ab2ef02 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.551036] env[61852]: DEBUG oslo_vmware.rw_handles [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ed2c6e-44c9-7bd6-269f-29879ffafc8b/disk-0.vmdk from lease info. {{(pid=61852) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 807.551223] env[61852]: DEBUG oslo_vmware.rw_handles [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ed2c6e-44c9-7bd6-269f-29879ffafc8b/disk-0.vmdk for reading. {{(pid=61852) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 807.663512] env[61852]: DEBUG oslo_vmware.api [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1292828, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07392} completed successfully. 
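Image upload then streams the disk straight out of vCenter: ExportVm hands back an HttpNfcLease, oslo.vmware polls it until ready (the "is initializing" / "is ready" lines), and the VMDK URL is read from the lease's info property and opened for reading. Condensed to the same session API (wait_for_lease_ready is the helper the log itself shows):

```python
from oslo_vmware import vim_util

def open_exported_vmdk(session, vm_ref):
    lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
    session.wait_for_lease_ready(lease)  # 'Lease ... is ready.'
    # HttpNfcLeaseInfo carries one deviceUrl per exported disk; its url is
    # the 'Found VMDK URL: https://esx...' value in the log.
    info = session.invoke_api(vim_util, 'get_object_property',
                              session.vim, lease, 'info')
    return lease, info.deviceUrl[0].url
```

While the read is in progress the lease has to be kept alive with periodic HttpNfcLeaseProgress calls (visible a few entries below) and closed with HttpNfcLeaseComplete once the stream is drained.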
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.663787] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 807.664591] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15672e2e-7445-47af-80c7-6d1bd8fa865f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.685805] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] 8d733f93-7636-447b-a5d5-53c16c30061f/8d733f93-7636-447b-a5d5-53c16c30061f.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 807.688283] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d304b1d-1db0-4663-9bbe-81e49563654e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.710319] env[61852]: DEBUG oslo_vmware.api [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Task: {'id': task-1292829, 'name': Rename_Task, 'duration_secs': 0.140502} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.711422] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 807.711724] env[61852]: DEBUG oslo_vmware.api [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 807.711724] env[61852]: value = "task-1292830" [ 807.711724] env[61852]: _type = "Task" [ 807.711724] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.714023] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2558c88a-f27b-4bbf-8309-59443fae624f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.724224] env[61852]: DEBUG oslo_vmware.api [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1292830, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.725276] env[61852]: DEBUG oslo_vmware.api [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Waiting for the task: (returnval){ [ 807.725276] env[61852]: value = "task-1292831" [ 807.725276] env[61852]: _type = "Task" [ 807.725276] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.734493] env[61852]: DEBUG oslo_vmware.api [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Task: {'id': task-1292831, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.750364] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9571bfab-7613-4122-bcb2-addd03b6597a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.758854] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44c351ff-3c1f-4f47-9163-05279e721b60 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.765346] env[61852]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e3e0a4df-346f-474e-803f-ffc68cca054b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.797049] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9fe51a4-60c0-4ab8-9713-594719c5d320 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.808009] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9687a554-eb15-4702-8e66-0bc0f0109213 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.821443] env[61852]: DEBUG nova.compute.provider_tree [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 807.878478] env[61852]: DEBUG oslo_vmware.api [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292826, 'name': CreateSnapshot_Task} progress is 0%. 
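The inventory pushed to Placement decodes as: schedulable capacity per resource class is (total − reserved) × allocation_ratio, so this node offers (48 − 0) × 4.0 = 192 VCPU, (196590 − 512) × 1.0 = 196078 MB of RAM, and (400 − 0) × 1.0 = 400 GB of disk, while max_unit (16 VCPU, 65530 MB, 138 GB) caps any single allocation. The 4.0 CPU ratio is why 48 physical cores can carry far more than 48 single-VCPU tempest instances.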
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.227622] env[61852]: DEBUG oslo_vmware.api [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1292830, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.238603] env[61852]: DEBUG oslo_vmware.api [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Task: {'id': task-1292831, 'name': PowerOnVM_Task, 'duration_secs': 0.503748} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.238987] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 808.239310] env[61852]: INFO nova.compute.manager [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Took 8.84 seconds to spawn the instance on the hypervisor. [ 808.239563] env[61852]: DEBUG nova.compute.manager [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 808.240589] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-153224fa-114f-4033-ab6a-82054e3ef4ea {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.366832] env[61852]: DEBUG nova.scheduler.client.report [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Updated inventory for provider f818062c-7b17-4bd0-94af-192a674543c3 with generation 83 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 808.367449] env[61852]: DEBUG nova.compute.provider_tree [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Updating resource provider f818062c-7b17-4bd0-94af-192a674543c3 generation from 83 to 84 during operation: update_inventory {{(pid=61852) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 808.367748] env[61852]: DEBUG nova.compute.provider_tree [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 808.381275] env[61852]: DEBUG oslo_vmware.api [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292826, 'name': CreateSnapshot_Task, 'duration_secs': 1.361179} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.381728] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Created Snapshot of the VM instance {{(pid=61852) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 808.382746] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6362d49-9c0b-4264-bd50-1d9afd3aa0b8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.726436] env[61852]: DEBUG oslo_vmware.api [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1292830, 'name': ReconfigVM_Task, 'duration_secs': 0.606576} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.726934] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Reconfigured VM instance instance-0000003c to attach disk [datastore1] 8d733f93-7636-447b-a5d5-53c16c30061f/8d733f93-7636-447b-a5d5-53c16c30061f.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 808.728544] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-63ceaa32-08e4-4828-8853-5826d80729fb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.735911] env[61852]: DEBUG oslo_vmware.api [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 808.735911] env[61852]: value = "task-1292832" [ 808.735911] env[61852]: _type = "Task" [ 808.735911] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.745214] env[61852]: DEBUG oslo_vmware.api [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1292832, 'name': Rename_Task} progress is 5%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.760272] env[61852]: INFO nova.compute.manager [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Took 31.24 seconds to build instance. [ 808.873303] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61852) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 808.873437] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 9.051s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.874154] env[61852]: DEBUG oslo_concurrency.lockutils [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.366s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.875962] env[61852]: INFO nova.compute.claims [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 808.878720] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 808.878932] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Cleaning up deleted instances {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 808.902607] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Creating linked-clone VM from snapshot {{(pid=61852) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 808.904375] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6a2fa959-b8f9-4f4f-ab17-44ec57315aff {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.913674] env[61852]: DEBUG oslo_vmware.api [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){ [ 808.913674] env[61852]: value = "task-1292833" [ 808.913674] env[61852]: _type = "Task" [ 808.913674] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.923194] env[61852]: DEBUG oslo_vmware.api [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292833, 'name': CloneVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.246481] env[61852]: DEBUG oslo_vmware.api [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1292832, 'name': Rename_Task, 'duration_secs': 0.254496} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.246850] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 809.247057] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e1954800-04ba-47cc-b411-25700d9962c4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.255844] env[61852]: DEBUG oslo_vmware.api [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 809.255844] env[61852]: value = "task-1292834" [ 809.255844] env[61852]: _type = "Task" [ 809.255844] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.264212] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a0f62a7e-c852-4b4e-902e-616975f0b1ff tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Lock "b0d38886-aacb-4b7e-9530-c5891d9cee66" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 100.798s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.264494] env[61852]: DEBUG oslo_vmware.api [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1292834, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.362273] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Acquiring lock "b0d38886-aacb-4b7e-9530-c5891d9cee66" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.362573] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Lock "b0d38886-aacb-4b7e-9530-c5891d9cee66" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.362909] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Acquiring lock "b0d38886-aacb-4b7e-9530-c5891d9cee66-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.363308] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Lock "b0d38886-aacb-4b7e-9530-c5891d9cee66-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.363626] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Lock "b0d38886-aacb-4b7e-9530-c5891d9cee66-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.366114] env[61852]: INFO nova.compute.manager [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Terminating instance [ 809.368210] env[61852]: DEBUG nova.compute.manager [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Start destroying the instance on the hypervisor. 
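The four lock lines above are the standard terminate prologue: one named lock on the instance UUID serializes do_terminate_instance, and a short-lived "<uuid>-events" lock guards clearing queued external events before teardown starts. Reduced to oslo.concurrency primitives (helper names hypothetical):

```python
from oslo_concurrency import lockutils

def do_terminate_instance(instance_uuid):
    with lockutils.lock(instance_uuid):  # one terminate per instance
        clear_events_for_instance(instance_uuid)
        # ... shutdown: power off, unregister, delete datastore files ...

def clear_events_for_instance(instance_uuid):
    with lockutils.lock(instance_uuid + '-events'):
        pass  # drop pending external events for this instance
```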
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 809.368469] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 809.369347] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30e9d1f6-578b-484e-83a9-79dd5cb180f1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.378134] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 809.378440] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-42789bb8-217c-4133-973e-f183552d8cb7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.385442] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] There are 4 instances to clean {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}} [ 809.385730] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: be44214d-72dc-4517-a91a-7f659b5aa897] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 809.389571] env[61852]: DEBUG oslo_vmware.api [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Waiting for the task: (returnval){ [ 809.389571] env[61852]: value = "task-1292835" [ 809.389571] env[61852]: _type = "Task" [ 809.389571] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.400052] env[61852]: DEBUG oslo_vmware.api [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Task: {'id': task-1292835, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.423573] env[61852]: DEBUG oslo_vmware.api [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292833, 'name': CloneVM_Task} progress is 94%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.765531] env[61852]: DEBUG oslo_vmware.api [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1292834, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.767056] env[61852]: DEBUG nova.compute.manager [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 809.894886] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 694889e8-200e-454c-9e87-60521dd044d9] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 809.908976] env[61852]: DEBUG oslo_vmware.api [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Task: {'id': task-1292835, 'name': PowerOffVM_Task, 'duration_secs': 0.210941} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.909322] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 809.909506] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 809.909764] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a395da77-1cb8-400e-b45b-96b8d2196946 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.927070] env[61852]: DEBUG oslo_vmware.api [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292833, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.982429] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 809.982656] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 809.982847] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Deleting the datastore file [datastore1] b0d38886-aacb-4b7e-9530-c5891d9cee66 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 809.983238] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ddbd04c2-6cef-44c0-9e4d-618b11e05461 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.990218] env[61852]: DEBUG oslo_vmware.api [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Waiting for the task: (returnval){ [ 809.990218] env[61852]: value = "task-1292837" [ 809.990218] env[61852]: _type = "Task" [ 809.990218] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.001978] env[61852]: DEBUG oslo_vmware.api [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Task: {'id': task-1292837, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.210253] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f15d00-9fca-4473-b660-b12a3c9b0eaa {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.218206] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-956e8ce9-191d-4c39-a1e4-f1e65a4701a2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.252169] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18fe2d60-4c5b-430f-9024-a21a6879c801 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.263598] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecdc1cb9-68c5-4615-9c63-593cb4a65c18 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.272362] env[61852]: DEBUG oslo_vmware.api [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1292834, 'name': PowerOnVM_Task, 'duration_secs': 0.865767} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.273016] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 810.273240] env[61852]: INFO nova.compute.manager [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Took 8.43 seconds to spawn the instance on the hypervisor. 
[ 810.273411] env[61852]: DEBUG nova.compute.manager [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 810.274179] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-816ae3eb-16cc-4a49-a743-366299b83b55 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.286423] env[61852]: DEBUG nova.compute.provider_tree [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 810.301830] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 810.403337] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 4ce41dca-63c6-447d-9c0a-00f9966e0093] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 810.425228] env[61852]: DEBUG oslo_vmware.api [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292833, 'name': CloneVM_Task} progress is 95%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.500478] env[61852]: DEBUG oslo_vmware.api [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Task: {'id': task-1292837, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.323071} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.500752] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 810.500942] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 810.501188] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 810.501421] env[61852]: INFO nova.compute.manager [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Took 1.13 seconds to destroy the instance on the hypervisor. [ 810.501633] env[61852]: DEBUG oslo.service.loopingcall [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 810.501835] env[61852]: DEBUG nova.compute.manager [-] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 810.501930] env[61852]: DEBUG nova.network.neutron [-] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 810.789249] env[61852]: DEBUG nova.compute.manager [req-1f97b361-1da9-44fb-ac7a-da8b08dc7545 req-391a6e19-7b60-4616-a50e-606afabb2cf8 service nova] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Received event network-vif-deleted-3d6f661c-c36f-4b84-b1ed-6b0388986c2d {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 810.789456] env[61852]: INFO nova.compute.manager [req-1f97b361-1da9-44fb-ac7a-da8b08dc7545 req-391a6e19-7b60-4616-a50e-606afabb2cf8 service nova] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Neutron deleted interface 3d6f661c-c36f-4b84-b1ed-6b0388986c2d; detaching it from the instance and deleting it from the info cache [ 810.789662] env[61852]: DEBUG nova.network.neutron [req-1f97b361-1da9-44fb-ac7a-da8b08dc7545 req-391a6e19-7b60-4616-a50e-606afabb2cf8 service nova] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.791401] env[61852]: DEBUG nova.scheduler.client.report [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 810.801775] env[61852]: INFO nova.compute.manager [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Took 29.81 seconds to build instance. [ 810.906572] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: d75e131b-1933-4e1f-bcf1-62ed83779177] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 810.926100] env[61852]: DEBUG oslo_vmware.api [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292833, 'name': CloneVM_Task} progress is 95%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.262857] env[61852]: DEBUG nova.network.neutron [-] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.295872] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-04ac8e5f-4fc1-49c3-816f-79a2306a3f7f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.298821] env[61852]: DEBUG oslo_concurrency.lockutils [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.425s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.299330] env[61852]: DEBUG nova.compute.manager [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 811.302051] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.767s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.303758] env[61852]: INFO nova.compute.claims [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 811.307779] env[61852]: DEBUG oslo_concurrency.lockutils [None req-42750323-5972-46e9-8a78-719bafe7058a tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Lock "8d733f93-7636-447b-a5d5-53c16c30061f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 95.247s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.313265] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57e7cddd-a368-4687-9cc5-fa9008a7e8a5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.345971] env[61852]: DEBUG nova.compute.manager [req-1f97b361-1da9-44fb-ac7a-da8b08dc7545 req-391a6e19-7b60-4616-a50e-606afabb2cf8 service nova] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Detach interface failed, port_id=3d6f661c-c36f-4b84-b1ed-6b0388986c2d, reason: Instance b0d38886-aacb-4b7e-9530-c5891d9cee66 could not be found. 
{{(pid=61852) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 811.409614] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 811.409887] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Cleaning up deleted instances with incomplete migration {{(pid=61852) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 811.426578] env[61852]: DEBUG oslo_vmware.api [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292833, 'name': CloneVM_Task, 'duration_secs': 2.206341} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.427488] env[61852]: INFO nova.virt.vmwareapi.vmops [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Created linked-clone VM from snapshot [ 811.428285] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d44ccf3f-cabd-4113-92d7-4c7fdfe147bf {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.436346] env[61852]: DEBUG nova.virt.vmwareapi.images [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Uploading image e7fa1b16-39e9-4667-b21a-b76c52e00146 {{(pid=61852) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 811.456922] env[61852]: DEBUG oslo_vmware.rw_handles [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 811.456922] env[61852]: value = "vm-277329" [ 811.456922] env[61852]: _type = "VirtualMachine" [ 811.456922] env[61852]: }. {{(pid=61852) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 811.457213] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-22914079-9c89-4923-adc9-fc3c430e6ded {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.464566] env[61852]: DEBUG oslo_vmware.rw_handles [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Lease: (returnval){ [ 811.464566] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52fb29c4-869f-26ed-89cf-0b9ca3cec7c4" [ 811.464566] env[61852]: _type = "HttpNfcLease" [ 811.464566] env[61852]: } obtained for exporting VM: (result){ [ 811.464566] env[61852]: value = "vm-277329" [ 811.464566] env[61852]: _type = "VirtualMachine" [ 811.464566] env[61852]: }. 
{{(pid=61852) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 811.464816] env[61852]: DEBUG oslo_vmware.api [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the lease: (returnval){ [ 811.464816] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52fb29c4-869f-26ed-89cf-0b9ca3cec7c4" [ 811.464816] env[61852]: _type = "HttpNfcLease" [ 811.464816] env[61852]: } to be ready. {{(pid=61852) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 811.471925] env[61852]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 811.471925] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52fb29c4-869f-26ed-89cf-0b9ca3cec7c4" [ 811.471925] env[61852]: _type = "HttpNfcLease" [ 811.471925] env[61852]: } is initializing. {{(pid=61852) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 811.766172] env[61852]: INFO nova.compute.manager [-] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Took 1.26 seconds to deallocate network for instance. [ 811.809776] env[61852]: DEBUG nova.compute.utils [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 811.811442] env[61852]: DEBUG nova.compute.manager [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 811.811620] env[61852]: DEBUG nova.network.neutron [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 811.813707] env[61852]: DEBUG nova.compute.manager [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 811.832435] env[61852]: INFO nova.compute.manager [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Rescuing [ 811.832886] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquiring lock "refresh_cache-8d733f93-7636-447b-a5d5-53c16c30061f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 811.834034] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquired lock "refresh_cache-8d733f93-7636-447b-a5d5-53c16c30061f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.834034] env[61852]: DEBUG nova.network.neutron [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 811.869138] env[61852]: DEBUG nova.policy [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '81f93d952cce4f6a8cd87f87696786aa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0c7f48c684044564b9081d6bc04c7e29', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 811.911908] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 811.973544] env[61852]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 811.973544] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52fb29c4-869f-26ed-89cf-0b9ca3cec7c4" [ 811.973544] env[61852]: _type = "HttpNfcLease" [ 811.973544] env[61852]: } is ready. {{(pid=61852) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 811.973844] env[61852]: DEBUG oslo_vmware.rw_handles [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 811.973844] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52fb29c4-869f-26ed-89cf-0b9ca3cec7c4" [ 811.973844] env[61852]: _type = "HttpNfcLease" [ 811.973844] env[61852]: }. 
{{(pid=61852) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 811.974876] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb3e5bd8-6e52-4e67-bd39-22d81bf2c8e0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.982540] env[61852]: DEBUG oslo_vmware.rw_handles [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c91179-7a8f-94d6-4e1e-2200793b25ba/disk-0.vmdk from lease info. {{(pid=61852) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 811.982730] env[61852]: DEBUG oslo_vmware.rw_handles [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c91179-7a8f-94d6-4e1e-2200793b25ba/disk-0.vmdk for reading. {{(pid=61852) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 812.098299] env[61852]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-84787e05-f0e4-4e5e-99ed-134d10b83e88 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.144101] env[61852]: DEBUG nova.network.neutron [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Successfully created port: 819604bb-f7cf-449f-8681-bf4901e756e1 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 812.274450] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.324881] env[61852]: DEBUG nova.compute.manager [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 812.356028] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.404479] env[61852]: DEBUG nova.network.neutron [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Successfully created port: 92411dba-21d2-474e-9b4f-cda4bea94122 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 812.674702] env[61852]: DEBUG nova.network.neutron [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Updating instance_info_cache with network_info: [{"id": "7791e47c-6084-49c7-b1c2-b28459f8f408", "address": "fa:16:3e:5f:7a:02", "network": {"id": "f986fa1f-0449-45a5-86ee-66a7fe44ea49", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-901829409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8dff8d945da948a89ee0fb2e2ddd0f9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7791e47c-60", "ovs_interfaceid": "7791e47c-6084-49c7-b1c2-b28459f8f408", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.791018] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac289fd5-9a2c-4914-b61b-c2092d882a33 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.797380] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-065f01bd-c5e4-4238-8957-f1e4d77746a8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.835360] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c641a629-f20d-4aae-9e06-8d771626a378 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.848029] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c66b003-a7d4-4fe2-8a10-063964d9cf1c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.863168] env[61852]: DEBUG 
nova.compute.provider_tree [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 812.901426] env[61852]: DEBUG nova.network.neutron [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Successfully created port: 8dd44cd3-3b97-484e-b3ed-ddb88f224343 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 813.177803] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Releasing lock "refresh_cache-8d733f93-7636-447b-a5d5-53c16c30061f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 813.343134] env[61852]: DEBUG nova.compute.manager [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 813.365895] env[61852]: DEBUG nova.scheduler.client.report [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 813.872061] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.570s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.872234] env[61852]: DEBUG nova.compute.manager [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 813.875925] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.255s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 813.877496] env[61852]: INFO nova.compute.claims [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 814.378055] env[61852]: DEBUG nova.compute.utils [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 814.383268] env[61852]: DEBUG nova.compute.manager [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 814.383518] env[61852]: DEBUG nova.network.neutron [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 814.428511] env[61852]: DEBUG nova.policy [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f8780b1622ce4d98a88fa92bee3e4bd2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6d65efc960c14799bcf1b26ecdf9c912', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 814.455499] env[61852]: DEBUG nova.network.neutron [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Successfully updated port: 819604bb-f7cf-449f-8681-bf4901e756e1 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 814.672198] env[61852]: DEBUG nova.network.neutron [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Successfully created port: 72d13320-e518-4f1a-98b0-cb48bcb2fe11 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 814.887179] env[61852]: DEBUG nova.compute.manager [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 
tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 815.169961] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c0c53c-0b2a-4cc7-85af-64ed50150dc2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.178634] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9192f87d-ee69-4e7c-89e0-0d893b1e5bbf {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.216158] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79289cae-173f-4039-8791-31fb0a754b5c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.223788] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f19d9d31-6a11-4db0-9f15-d19793e49808 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.238218] env[61852]: DEBUG nova.compute.provider_tree [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 815.741279] env[61852]: DEBUG nova.scheduler.client.report [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 815.897464] env[61852]: DEBUG nova.compute.manager [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 816.167833] env[61852]: DEBUG nova.network.neutron [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Successfully updated port: 72d13320-e518-4f1a-98b0-cb48bcb2fe11 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 816.246202] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.370s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 816.246638] env[61852]: DEBUG nova.compute.manager [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 816.249297] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.955s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 816.250737] env[61852]: INFO nova.compute.claims [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 816.445006] env[61852]: DEBUG nova.network.neutron [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Successfully updated port: 92411dba-21d2-474e-9b4f-cda4bea94122 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 816.670128] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Acquiring lock "refresh_cache-0ec1210f-7d42-4b71-abdc-9f818ffb91ea" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 816.670441] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Acquired lock "refresh_cache-0ec1210f-7d42-4b71-abdc-9f818ffb91ea" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.670661] env[61852]: DEBUG nova.network.neutron [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Building network info cache for instance {{(pid=61852) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 816.755915] env[61852]: DEBUG nova.compute.utils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 816.760885] env[61852]: DEBUG nova.compute.manager [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 816.761086] env[61852]: DEBUG nova.network.neutron [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 816.801552] env[61852]: DEBUG nova.policy [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c4d6297b7026476c9753affb38106e9e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f6a9b0e24a2545cf877ccef6701fcac6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 817.045267] env[61852]: DEBUG nova.network.neutron [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Successfully created port: 145feb94-c188-4d2a-a614-870d122d1174 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 817.218746] env[61852]: DEBUG nova.network.neutron [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 817.264628] env[61852]: DEBUG nova.compute.manager [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 817.417555] env[61852]: DEBUG nova.network.neutron [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Updating instance_info_cache with network_info: [{"id": "72d13320-e518-4f1a-98b0-cb48bcb2fe11", "address": "fa:16:3e:74:13:9d", "network": {"id": "dc45adde-e4fb-4495-a4c3-3373c99a2eb7", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1925353850-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d65efc960c14799bcf1b26ecdf9c912", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72d13320-e5", "ovs_interfaceid": "72d13320-e518-4f1a-98b0-cb48bcb2fe11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.448885] env[61852]: DEBUG nova.virt.hardware [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 817.449166] env[61852]: DEBUG nova.virt.hardware [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 817.449348] env[61852]: DEBUG nova.virt.hardware [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 817.449506] env[61852]: DEBUG nova.virt.hardware [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Flavor pref 0:0:0 {{(pid=61852) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 817.450570] env[61852]: DEBUG nova.virt.hardware [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 817.450570] env[61852]: DEBUG nova.virt.hardware [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 817.450570] env[61852]: DEBUG nova.virt.hardware [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 817.450570] env[61852]: DEBUG nova.virt.hardware [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 817.450570] env[61852]: DEBUG nova.virt.hardware [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 817.450764] env[61852]: DEBUG nova.virt.hardware [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 817.450800] env[61852]: DEBUG nova.virt.hardware [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 817.452982] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f34aca6-f8ef-48e2-9363-b039413de39b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.462446] env[61852]: DEBUG nova.virt.hardware [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 817.462682] env[61852]: DEBUG nova.virt.hardware [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 817.462845] env[61852]: DEBUG nova.virt.hardware [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 817.463047] env[61852]: DEBUG nova.virt.hardware [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 817.463293] env[61852]: DEBUG nova.virt.hardware [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 817.463542] env[61852]: DEBUG nova.virt.hardware [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 817.463748] env[61852]: DEBUG nova.virt.hardware [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 817.463953] env[61852]: DEBUG nova.virt.hardware [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 817.464275] env[61852]: DEBUG nova.virt.hardware [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 817.464505] env[61852]: DEBUG nova.virt.hardware [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 817.464759] env[61852]: 
DEBUG nova.virt.hardware [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 817.467169] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94dae865-2dcf-407d-908d-b5df903e6e6e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.476756] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df7a23b4-4990-4616-ac8b-be5470b69072 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.485698] env[61852]: DEBUG oslo_vmware.rw_handles [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ed2c6e-44c9-7bd6-269f-29879ffafc8b/disk-0.vmdk. {{(pid=61852) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 817.486905] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e5f9a21-345a-4f19-a2b5-beaaca39bc47 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.492632] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dde12b0-5450-4f05-827e-8fc13bac6732 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.508883] env[61852]: DEBUG oslo_vmware.rw_handles [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ed2c6e-44c9-7bd6-269f-29879ffafc8b/disk-0.vmdk is in state: ready. {{(pid=61852) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 817.509104] env[61852]: ERROR oslo_vmware.rw_handles [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ed2c6e-44c9-7bd6-269f-29879ffafc8b/disk-0.vmdk due to incomplete transfer. [ 817.516865] env[61852]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-921d7230-8920-4042-a54a-07064f6c402c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.527756] env[61852]: DEBUG oslo_vmware.rw_handles [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ed2c6e-44c9-7bd6-269f-29879ffafc8b/disk-0.vmdk. 
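The records above trace oslo_vmware's lease-release path: the NFC lease for disk-0.vmdk is still in state "ready", but the transfer did not run to completion, so the handle aborts the lease (HttpNfcLeaseAbort) instead of completing it before closing. A minimal, self-contained Python sketch of that decision; FakeLease, release_lease, and the byte counts are illustrative stand-ins, not oslo_vmware's real API:

    class FakeLease:
        """Stand-in for a vSphere HttpNfcLease (illustrative only)."""
        def __init__(self, state):
            self.state = state
            self.aborted = False
            self.completed = False

        def abort(self):
            self.aborted = True

        def complete(self):
            self.completed = True

    def release_lease(lease, bytes_written, expected_size):
        # Mirrors the trace: a lease that is still 'ready' but saw fewer
        # bytes than the disk size is aborted rather than completed.
        if lease.state != "ready":
            return  # lease already errored or expired; nothing to do
        if bytes_written < expected_size:
            print("Aborting lease due to incomplete transfer.")
            lease.abort()
        else:
            lease.complete()

    lease = FakeLease("ready")
    release_lease(lease, bytes_written=10, expected_size=21318656)
    assert lease.aborted and not lease.completed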
{{(pid=61852) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 817.527756] env[61852]: DEBUG nova.virt.vmwareapi.images [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Uploaded image fb0a83e2-a324-456f-8eb2-051907376bce to the Glance image server {{(pid=61852) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 817.529898] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Destroying the VM {{(pid=61852) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 817.532499] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-21de03ec-1928-4381-bbbf-0028b528355b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.539608] env[61852]: DEBUG oslo_vmware.api [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 817.539608] env[61852]: value = "task-1292839" [ 817.539608] env[61852]: _type = "Task" [ 817.539608] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.549937] env[61852]: DEBUG oslo_vmware.api [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1292839, 'name': Destroy_Task} progress is 0%. 
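Destroy_Task above now enters the wait_for_task/_poll_task cycle: the API layer polls the vCenter task object at a fixed interval, logging progress until it reports success or error. A simplified sketch of that polling loop under assumed names (wait_for_task here is a generic reimplementation, not oslo_vmware.api's signature):

    import time

    def wait_for_task(poll, interval=0.5, timeout=30.0):
        """Poll a task-state callable until it reports success or error."""
        start = time.monotonic()
        while True:
            info = poll()  # e.g. {'state': 'running', 'progress': 33}
            if info["state"] == "success":
                return info
            if info["state"] == "error":
                raise RuntimeError(info.get("error", "task failed"))
            if time.monotonic() - start > timeout:
                raise TimeoutError("task did not complete in time")
            print(f"progress is {info.get('progress', 0)}%")
            time.sleep(interval)

    # Demo with a fake task that finishes on the third poll.
    states = iter([{"state": "running", "progress": 0},
                   {"state": "running", "progress": 33},
                   {"state": "success", "progress": 100}])
    print(wait_for_task(lambda: next(states), interval=0.01))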
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.624693] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15bf0e55-ef59-4fca-9a11-3a0be851e5e9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.632691] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a1795ab-7eb5-4a55-bdd6-d1cd45d6ad44 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.662562] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-675e0151-47bc-4604-83a8-cce31acf220a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.670784] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a6d6902-72ef-4c9c-81e6-49768cc5cfee {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.684415] env[61852]: DEBUG nova.compute.provider_tree [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 817.920252] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Releasing lock "refresh_cache-0ec1210f-7d42-4b71-abdc-9f818ffb91ea" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 817.920535] env[61852]: DEBUG nova.compute.manager [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Instance network_info: |[{"id": "72d13320-e518-4f1a-98b0-cb48bcb2fe11", "address": "fa:16:3e:74:13:9d", "network": {"id": "dc45adde-e4fb-4495-a4c3-3373c99a2eb7", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1925353850-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d65efc960c14799bcf1b26ecdf9c912", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72d13320-e5", "ovs_interfaceid": "72d13320-e518-4f1a-98b0-cb48bcb2fe11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 817.920936] env[61852]: DEBUG 
nova.virt.vmwareapi.vmops [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:13:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3cc0a33d-17c0-4b87-b48f-413a87a4cc6a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '72d13320-e518-4f1a-98b0-cb48bcb2fe11', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 817.929398] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Creating folder: Project (6d65efc960c14799bcf1b26ecdf9c912). Parent ref: group-v277280. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 817.929755] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-acf6eb04-66a7-485a-97af-51735d6a0935 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.941142] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Created folder: Project (6d65efc960c14799bcf1b26ecdf9c912) in parent group-v277280. [ 817.941427] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Creating folder: Instances. Parent ref: group-v277330. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 817.941710] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-855d4b34-01a1-4109-864e-a07ce37638ca {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.948840] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 817.949153] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f21dec4d-68b4-43cf-9f52-e96430f1c9c4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.952381] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Created folder: Instances in parent group-v277330. [ 817.952652] env[61852]: DEBUG oslo.service.loopingcall [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
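The folder-creation records above (Project (...) under group-v277280, then Instances under the new group-v277330) rely on the driver treating a duplicate-name error from vCenter as success, so the two-level layout is idempotent across concurrent spawns. A toy sketch of that pattern; FakeFolder and DuplicateName are invented stand-ins for the vCenter inventory API:

    class DuplicateName(Exception):
        pass

    class FakeFolder:
        """Toy inventory node standing in for a vCenter Folder."""
        def __init__(self, name):
            self.name = name
            self.children = {}

        def create_folder(self, name):
            if name in self.children:
                raise DuplicateName(name)
            self.children[name] = FakeFolder(name)
            return self.children[name]

    def get_or_create_folder(parent, name):
        # Treat DuplicateName as success and reuse the existing folder.
        try:
            return parent.create_folder(name)
        except DuplicateName:
            return parent.children[name]

    root = FakeFolder("group-v277280")
    project = get_or_create_folder(root, "Project (6d65efc960c14799bcf1b26ecdf9c912)")
    instances = get_or_create_folder(project, "Instances")
    # Running it again finds the same folder instead of failing.
    assert get_or_create_folder(project, "Instances") is instances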
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 817.953424] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 817.953617] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d7d11d67-fd9d-49f9-addc-3340a36e7ee7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.973628] env[61852]: DEBUG oslo_vmware.api [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 817.973628] env[61852]: value = "task-1292842" [ 817.973628] env[61852]: _type = "Task" [ 817.973628] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.979309] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 817.979309] env[61852]: value = "task-1292843" [ 817.979309] env[61852]: _type = "Task" [ 817.979309] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.984251] env[61852]: DEBUG nova.compute.manager [req-99cead7e-9dae-4390-b04e-0a88758840d3 req-30a4e40c-78e8-42d0-b320-4ad58312da9e service nova] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Received event network-vif-plugged-819604bb-f7cf-449f-8681-bf4901e756e1 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 817.984482] env[61852]: DEBUG oslo_concurrency.lockutils [req-99cead7e-9dae-4390-b04e-0a88758840d3 req-30a4e40c-78e8-42d0-b320-4ad58312da9e service nova] Acquiring lock "e265a4be-7b37-40b5-a199-42a7cd945f66-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.984733] env[61852]: DEBUG oslo_concurrency.lockutils [req-99cead7e-9dae-4390-b04e-0a88758840d3 req-30a4e40c-78e8-42d0-b320-4ad58312da9e service nova] Lock "e265a4be-7b37-40b5-a199-42a7cd945f66-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.984930] env[61852]: DEBUG oslo_concurrency.lockutils [req-99cead7e-9dae-4390-b04e-0a88758840d3 req-30a4e40c-78e8-42d0-b320-4ad58312da9e service nova] Lock "e265a4be-7b37-40b5-a199-42a7cd945f66-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.985117] env[61852]: DEBUG nova.compute.manager [req-99cead7e-9dae-4390-b04e-0a88758840d3 req-30a4e40c-78e8-42d0-b320-4ad58312da9e service nova] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] No waiting events found dispatching network-vif-plugged-819604bb-f7cf-449f-8681-bf4901e756e1 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 817.985288] env[61852]: WARNING nova.compute.manager [req-99cead7e-9dae-4390-b04e-0a88758840d3 req-30a4e40c-78e8-42d0-b320-4ad58312da9e service nova] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Received unexpected event 
network-vif-plugged-819604bb-f7cf-449f-8681-bf4901e756e1 for instance with vm_state building and task_state spawning. [ 817.985949] env[61852]: DEBUG oslo_vmware.api [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1292842, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.994657] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292843, 'name': CreateVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.000068] env[61852]: DEBUG nova.compute.manager [req-26b1b55d-6fda-4473-85df-f36a1e15a352 req-16e3eeaa-d70b-4400-9ea0-f462b94832d8 service nova] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Received event network-vif-plugged-72d13320-e518-4f1a-98b0-cb48bcb2fe11 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 818.000289] env[61852]: DEBUG oslo_concurrency.lockutils [req-26b1b55d-6fda-4473-85df-f36a1e15a352 req-16e3eeaa-d70b-4400-9ea0-f462b94832d8 service nova] Acquiring lock "0ec1210f-7d42-4b71-abdc-9f818ffb91ea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 818.000514] env[61852]: DEBUG oslo_concurrency.lockutils [req-26b1b55d-6fda-4473-85df-f36a1e15a352 req-16e3eeaa-d70b-4400-9ea0-f462b94832d8 service nova] Lock "0ec1210f-7d42-4b71-abdc-9f818ffb91ea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.000686] env[61852]: DEBUG oslo_concurrency.lockutils [req-26b1b55d-6fda-4473-85df-f36a1e15a352 req-16e3eeaa-d70b-4400-9ea0-f462b94832d8 service nova] Lock "0ec1210f-7d42-4b71-abdc-9f818ffb91ea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 818.000864] env[61852]: DEBUG nova.compute.manager [req-26b1b55d-6fda-4473-85df-f36a1e15a352 req-16e3eeaa-d70b-4400-9ea0-f462b94832d8 service nova] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] No waiting events found dispatching network-vif-plugged-72d13320-e518-4f1a-98b0-cb48bcb2fe11 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 818.001049] env[61852]: WARNING nova.compute.manager [req-26b1b55d-6fda-4473-85df-f36a1e15a352 req-16e3eeaa-d70b-4400-9ea0-f462b94832d8 service nova] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Received unexpected event network-vif-plugged-72d13320-e518-4f1a-98b0-cb48bcb2fe11 for instance with vm_state building and task_state spawning. [ 818.050690] env[61852]: DEBUG oslo_vmware.api [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1292839, 'name': Destroy_Task} progress is 33%. 
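The network-vif-plugged records above show the external-event handshake: under a per-instance "-events" lock, the manager pops a waiting event by name; if nothing registered for it, the event is logged as unexpected and dropped (the WARNING above). A compact sketch of that pop-or-warn pattern; InstanceEvents here is a simplified reconstruction, with one global lock where Nova keys locks per instance:

    import threading
    from collections import defaultdict

    class InstanceEvents:
        """Simplified pop-or-warn event dispatch."""
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = defaultdict(dict)   # instance -> {event: Event}

        def prepare(self, instance, name):
            ev = threading.Event()
            with self._lock:
                self._waiters[instance][name] = ev
            return ev

        def pop_instance_event(self, instance, name):
            with self._lock:
                return self._waiters[instance].pop(name, None)

        def external_event(self, instance, name):
            waiter = self.pop_instance_event(instance, name)
            if waiter is None:
                # Nothing was waiting: log and drop, as the WARNING shows.
                print(f"Received unexpected event {name} for {instance}")
            else:
                waiter.set()

    events = InstanceEvents()
    events.external_event("e265a4be", "network-vif-plugged-819604bb")  # unexpected
    ev = events.prepare("e265a4be", "network-vif-plugged-819604bb")
    events.external_event("e265a4be", "network-vif-plugged-819604bb")  # dispatched
    assert ev.is_set()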
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.187827] env[61852]: DEBUG nova.scheduler.client.report [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 818.274969] env[61852]: DEBUG nova.compute.manager [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 818.303142] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 818.303702] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 818.304027] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 818.304440] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 818.304693] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Image pref 0:0:0 {{(pid=61852) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 818.305138] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 818.305480] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 818.305795] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 818.306224] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 818.306526] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 818.306989] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 818.308215] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ff84183-641b-4899-9584-de00a118fdad {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.317392] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02014848-cc87-4497-8f18-dfac7795823f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.485308] env[61852]: DEBUG oslo_vmware.api [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1292842, 'name': PowerOffVM_Task, 'duration_secs': 0.240814} completed successfully. 
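The hardware.py records above walk the topology search end to end: with no flavor or image limits the maxima default to 65536 sockets/cores/threads, and for a 1-vCPU flavor the enumeration yields exactly one candidate, VirtCPUTopology(cores=1,sockets=1,threads=1). A much-reduced sketch of that enumeration (a brute-force reimplementation for illustration, not Nova's actual _get_possible_cpu_topologies):

    import itertools

    def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536,
                                max_threads=65536):
        """Enumerate (sockets, cores, threads) triples whose product is vcpus."""
        found = []
        for s, c, t in itertools.product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if s * c * t == vcpus:
                found.append((s, c, t))
        return found

    print(possible_cpu_topologies(1))   # [(1, 1, 1)] -- matches the trace
    print(possible_cpu_topologies(4))   # several candidates, e.g. (1, 2, 2)

For m1.nano's single vCPU the "sorted desired topologies" list is trivially the one candidate, which is why the same eight-line sequence repeats verbatim for every spawn in this trace.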
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.488780] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 818.489740] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8ee203e-e7d2-4d7b-bcc5-d076f4706a19 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.512203] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292843, 'name': CreateVM_Task, 'duration_secs': 0.459722} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.512995] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 818.513858] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72eaa8dc-95d6-426a-afbf-4b6b6fa16666 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.517337] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.517550] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.517921] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 818.518240] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8393a86a-31c6-4c19-8cc7-777e64d8fc19 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.523621] env[61852]: DEBUG oslo_vmware.api [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Waiting for the task: (returnval){ [ 818.523621] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52f2bff4-7d5f-a9b2-9b91-51dbf0b97dcd" [ 818.523621] env[61852]: _type = "Task" [ 818.523621] env[61852]: } to complete. 
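The lock names above follow a per-path convention: spawning requests serialize on "[datastore1] devstack-image-cache_base/<image-id>..." so only one of them downloads and converts a given image while the rest reuse the cache entry. A minimal check-then-fetch sketch under that assumption; the lock table, cache set, and fetch_image_if_missing name are illustrative, not Nova's internals:

    import threading
    from collections import defaultdict

    _locks = defaultdict(threading.Lock)
    _cache = set()  # datastore paths that already hold a converted image

    def fetch_image_if_missing(image_id, datastore="datastore1"):
        path = (f"[{datastore}] devstack-image-cache_base/"
                f"{image_id}/{image_id}.vmdk")
        with _locks[path]:            # serialize concurrent spawns per image
            if path in _cache:
                return path           # cache hit: reuse the cached VMDK
            # cache miss: download from Glance and convert (stubbed out)
            _cache.add(path)
            return path

    p1 = fetch_image_if_missing("90fd8f39-16b3-43e0-a682-0ec131005e31")
    p2 = fetch_image_if_missing("90fd8f39-16b3-43e0-a682-0ec131005e31")
    assert p1 == p2  # the second spawn reuses the cache entry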
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.538864] env[61852]: DEBUG oslo_vmware.api [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52f2bff4-7d5f-a9b2-9b91-51dbf0b97dcd, 'name': SearchDatastore_Task, 'duration_secs': 0.010186} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.539655] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 818.539796] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 818.540115] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.540308] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.540555] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 818.542756] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-af3e78b5-80c8-405a-8d29-964d204ab1c9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.550036] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 818.550903] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7f20b488-0b2f-4fa2-946f-9b9f1ce95de2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.558543] env[61852]: DEBUG oslo_vmware.api [None 
req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1292839, 'name': Destroy_Task, 'duration_secs': 0.715371} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.560809] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Destroyed the VM [ 818.561266] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Deleting Snapshot of the VM instance {{(pid=61852) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 818.561615] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 818.561818] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 818.562623] env[61852]: DEBUG oslo_vmware.api [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 818.562623] env[61852]: value = "task-1292844" [ 818.562623] env[61852]: _type = "Task" [ 818.562623] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.562894] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-46480644-7f75-4c27-be22-4957e1f2405b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.565718] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb5e8766-bc28-49ca-835a-c6bdb6538128 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.574926] env[61852]: DEBUG oslo_vmware.api [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Waiting for the task: (returnval){ [ 818.574926] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52f1d8ae-bfa3-5df3-c002-7be6d288f785" [ 818.574926] env[61852]: _type = "Task" [ 818.574926] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.579844] env[61852]: DEBUG oslo_vmware.api [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 818.579844] env[61852]: value = "task-1292845" [ 818.579844] env[61852]: _type = "Task" [ 818.579844] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.583366] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] VM already powered off {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 818.583913] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 818.583980] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.593627] env[61852]: DEBUG oslo_vmware.api [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52f1d8ae-bfa3-5df3-c002-7be6d288f785, 'name': SearchDatastore_Task, 'duration_secs': 0.009452} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.597420] env[61852]: DEBUG oslo_vmware.api [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1292845, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.597635] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c6e166c-0d0b-4b45-8feb-3ef8a274787b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.602770] env[61852]: DEBUG oslo_vmware.api [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Waiting for the task: (returnval){ [ 818.602770] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52f5d420-bba7-c86c-8a74-39e4faa51dde" [ 818.602770] env[61852]: _type = "Task" [ 818.602770] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.611451] env[61852]: DEBUG oslo_vmware.api [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52f5d420-bba7-c86c-8a74-39e4faa51dde, 'name': SearchDatastore_Task} progress is 0%. 
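"VM already powered off" above is handled as a non-error: the driver swallows the invalid-power-state fault so callers can treat power-off as idempotent, which matters here because the rescue path issues a second PowerOffVM between other tasks. A toy sketch of that tolerance; FakeVM and InvalidPowerState are invented stand-ins for the vSphere fault:

    class InvalidPowerState(Exception):
        pass

    class FakeVM:
        def __init__(self, state="poweredOn"):
            self.state = state

        def power_off(self):
            if self.state == "poweredOff":
                raise InvalidPowerState("already powered off")
            self.state = "poweredOff"

    def power_off_instance(vm):
        # Swallow the already-off fault so power-off is idempotent.
        try:
            vm.power_off()
            print("Powered off the VM")
        except InvalidPowerState:
            print("VM already powered off")

    vm = FakeVM()
    power_off_instance(vm)  # Powered off the VM
    power_off_instance(vm)  # VM already powered off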
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.657109] env[61852]: DEBUG nova.network.neutron [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Successfully updated port: 145feb94-c188-4d2a-a614-870d122d1174 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 818.692880] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.443s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 818.693519] env[61852]: DEBUG nova.compute.manager [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 818.696043] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.977s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.697431] env[61852]: INFO nova.compute.claims [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 818.736552] env[61852]: DEBUG oslo_concurrency.lockutils [None req-24c02fd9-3ce2-424c-b2ab-decf9a8e3beb tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "c94066d5-2e5f-4059-bdc5-385d517f1d84" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 818.736898] env[61852]: DEBUG oslo_concurrency.lockutils [None req-24c02fd9-3ce2-424c-b2ab-decf9a8e3beb tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "c94066d5-2e5f-4059-bdc5-385d517f1d84" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.737118] env[61852]: DEBUG oslo_concurrency.lockutils [None req-24c02fd9-3ce2-424c-b2ab-decf9a8e3beb tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "c94066d5-2e5f-4059-bdc5-385d517f1d84-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 818.737310] env[61852]: DEBUG oslo_concurrency.lockutils [None req-24c02fd9-3ce2-424c-b2ab-decf9a8e3beb tempest-ImagesTestJSON-651191963 
tempest-ImagesTestJSON-651191963-project-member] Lock "c94066d5-2e5f-4059-bdc5-385d517f1d84-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.737481] env[61852]: DEBUG oslo_concurrency.lockutils [None req-24c02fd9-3ce2-424c-b2ab-decf9a8e3beb tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "c94066d5-2e5f-4059-bdc5-385d517f1d84-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 818.740488] env[61852]: INFO nova.compute.manager [None req-24c02fd9-3ce2-424c-b2ab-decf9a8e3beb tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Terminating instance [ 818.743199] env[61852]: DEBUG nova.compute.manager [None req-24c02fd9-3ce2-424c-b2ab-decf9a8e3beb tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 818.743199] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-24c02fd9-3ce2-424c-b2ab-decf9a8e3beb tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 818.743792] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec2a65eb-31cb-4f91-9932-2c8325aa908f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.752532] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-24c02fd9-3ce2-424c-b2ab-decf9a8e3beb tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 818.752774] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-be31ea79-4389-4760-a62e-04743fdae36c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.936806] env[61852]: DEBUG nova.network.neutron [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Successfully updated port: 8dd44cd3-3b97-484e-b3ed-ddb88f224343 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 819.094525] env[61852]: DEBUG oslo_vmware.api [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1292845, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.112669] env[61852]: DEBUG oslo_vmware.api [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52f5d420-bba7-c86c-8a74-39e4faa51dde, 'name': SearchDatastore_Task, 'duration_secs': 0.010346} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.112928] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.113234] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 0ec1210f-7d42-4b71-abdc-9f818ffb91ea/0ec1210f-7d42-4b71-abdc-9f818ffb91ea.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 819.113524] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.113716] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 819.113928] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e0f66c3f-ed38-4342-a5c7-bd5944113ad1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.116061] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6284f1bc-5bd4-4b73-a8d9-3bc14dd71897 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.122906] env[61852]: DEBUG oslo_vmware.api [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Waiting for the task: (returnval){ [ 819.122906] env[61852]: value = "task-1292847" [ 819.122906] env[61852]: _type = "Task" [ 819.122906] env[61852]: } to complete. 
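The CopyVirtualDisk record above shows the naming convention the driver uses for that copy: source under the shared image cache, destination under a directory named for the instance UUID, both on the same datastore. A trivial sketch of just the path construction (string handling only; the real work is the datastore-side copy task):

    def vmdk_copy_paths(image_id, instance_uuid, datastore="datastore1"):
        src = (f"[{datastore}] devstack-image-cache_base/"
               f"{image_id}/{image_id}.vmdk")
        dst = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"
        return src, dst

    src, dst = vmdk_copy_paths("90fd8f39-16b3-43e0-a682-0ec131005e31",
                               "0ec1210f-7d42-4b71-abdc-9f818ffb91ea")
    print(src)  # cache copy shared by every instance booted from this image
    print(dst)  # per-instance root disk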
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.126840] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 819.127019] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 819.128043] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d9dc16f-0b73-44c5-9bf3-eeef21a29459 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.133045] env[61852]: DEBUG oslo_vmware.api [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Task: {'id': task-1292847, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.135948] env[61852]: DEBUG oslo_vmware.api [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 819.135948] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5286febb-bdff-ce45-038a-bee0f60d0a15" [ 819.135948] env[61852]: _type = "Task" [ 819.135948] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.143290] env[61852]: DEBUG oslo_vmware.api [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5286febb-bdff-ce45-038a-bee0f60d0a15, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.160102] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquiring lock "refresh_cache-89970cff-cb49-4803-81a5-1675b0ea4aaf" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.160257] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquired lock "refresh_cache-89970cff-cb49-4803-81a5-1675b0ea4aaf" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.161712] env[61852]: DEBUG nova.network.neutron [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 819.204200] env[61852]: DEBUG nova.compute.utils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 819.206093] env[61852]: DEBUG nova.compute.manager [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Allocating IP information in the background. 
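"Allocating IP information in the background" above reflects the build pipeline's concurrency: the Neutron allocation is kicked off asynchronously while block-device mapping proceeds, and the spawn only joins on the network result when it actually needs the VIFs. A small sketch of that overlap using a thread pool (the worker functions and instance IDs are illustrative placeholders):

    from concurrent.futures import ThreadPoolExecutor
    import time

    def allocate_network(instance):
        # Stand-in for the Neutron round trip, which can take a while.
        time.sleep(0.1)
        return {"instance": instance, "ports": ["d1e216a2-48ce"]}

    def build_block_device_mappings(instance):
        return [{"boot_index": 0, "instance": instance}]

    with ThreadPoolExecutor(max_workers=1) as pool:
        nw_future = pool.submit(allocate_network, "f8ebb1b7")   # background
        bdms = build_block_device_mappings("f8ebb1b7")          # overlaps
        network_info = nw_future.result()   # join before NICs are needed

    print(bdms, network_info)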
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 819.206333] env[61852]: DEBUG nova.network.neutron [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 819.282126] env[61852]: DEBUG nova.policy [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c4d6297b7026476c9753affb38106e9e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f6a9b0e24a2545cf877ccef6701fcac6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 819.440787] env[61852]: DEBUG oslo_concurrency.lockutils [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Acquiring lock "refresh_cache-e265a4be-7b37-40b5-a199-42a7cd945f66" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.440787] env[61852]: DEBUG oslo_concurrency.lockutils [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Acquired lock "refresh_cache-e265a4be-7b37-40b5-a199-42a7cd945f66" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.440787] env[61852]: DEBUG nova.network.neutron [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 819.573843] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "254919cb-e3cd-4288-8696-95e632d78a38" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 819.574263] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "254919cb-e3cd-4288-8696-95e632d78a38" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 819.596878] env[61852]: DEBUG oslo_vmware.api [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1292845, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.610831] env[61852]: DEBUG nova.network.neutron [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Successfully created port: d1e216a2-48ce-4945-8024-f78b3701fd65 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 819.636851] env[61852]: DEBUG oslo_vmware.api [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Task: {'id': task-1292847, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.647943] env[61852]: DEBUG oslo_vmware.api [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5286febb-bdff-ce45-038a-bee0f60d0a15, 'name': SearchDatastore_Task, 'duration_secs': 0.011658} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.648821] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-382101d4-ec34-4b0d-9108-302071654503 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.657121] env[61852]: DEBUG oslo_vmware.api [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 819.657121] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52fe60ab-28dd-16f8-547c-b278b631a290" [ 819.657121] env[61852]: _type = "Task" [ 819.657121] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.668582] env[61852]: DEBUG oslo_vmware.api [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52fe60ab-28dd-16f8-547c-b278b631a290, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.699145] env[61852]: DEBUG nova.network.neutron [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 819.709726] env[61852]: DEBUG nova.compute.manager [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 819.801551] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-24c02fd9-3ce2-424c-b2ab-decf9a8e3beb tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 819.801551] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-24c02fd9-3ce2-424c-b2ab-decf9a8e3beb tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Deleting contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 819.801733] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-24c02fd9-3ce2-424c-b2ab-decf9a8e3beb tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Deleting the datastore file [datastore2] c94066d5-2e5f-4059-bdc5-385d517f1d84 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 819.802413] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e68e687c-14c4-4292-a3b3-0672314c9b5d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.808838] env[61852]: DEBUG oslo_vmware.api [None req-24c02fd9-3ce2-424c-b2ab-decf9a8e3beb tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 819.808838] env[61852]: value = "task-1292848" [ 819.808838] env[61852]: _type = "Task" [ 819.808838] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.822043] env[61852]: DEBUG oslo_vmware.api [None req-24c02fd9-3ce2-424c-b2ab-decf9a8e3beb tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1292848, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.895894] env[61852]: DEBUG nova.network.neutron [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Updating instance_info_cache with network_info: [{"id": "145feb94-c188-4d2a-a614-870d122d1174", "address": "fa:16:3e:b9:ea:56", "network": {"id": "50fe42e7-c56b-4943-ae54-bab5c794e094", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-195787110-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6a9b0e24a2545cf877ccef6701fcac6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d62c1cf-f39a-4626-9552-f1e13c692636", "external-id": "nsx-vlan-transportzone-748", "segmentation_id": 748, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap145feb94-c1", "ovs_interfaceid": "145feb94-c188-4d2a-a614-870d122d1174", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.983163] env[61852]: DEBUG nova.network.neutron [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 820.030563] env[61852]: DEBUG nova.compute.manager [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Received event network-changed-819604bb-f7cf-449f-8681-bf4901e756e1 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 820.031633] env[61852]: DEBUG nova.compute.manager [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Refreshing instance network info cache due to event network-changed-819604bb-f7cf-449f-8681-bf4901e756e1. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 820.031746] env[61852]: DEBUG oslo_concurrency.lockutils [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] Acquiring lock "refresh_cache-e265a4be-7b37-40b5-a199-42a7cd945f66" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 820.052040] env[61852]: DEBUG nova.compute.manager [req-a01efe60-6eb4-4da5-ba59-6fbedd381461 req-2cc81877-950f-47f1-b83d-13e9381fbabb service nova] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Received event network-changed-72d13320-e518-4f1a-98b0-cb48bcb2fe11 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 820.052262] env[61852]: DEBUG nova.compute.manager [req-a01efe60-6eb4-4da5-ba59-6fbedd381461 req-2cc81877-950f-47f1-b83d-13e9381fbabb service nova] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Refreshing instance network info cache due to event network-changed-72d13320-e518-4f1a-98b0-cb48bcb2fe11. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 820.052598] env[61852]: DEBUG oslo_concurrency.lockutils [req-a01efe60-6eb4-4da5-ba59-6fbedd381461 req-2cc81877-950f-47f1-b83d-13e9381fbabb service nova] Acquiring lock "refresh_cache-0ec1210f-7d42-4b71-abdc-9f818ffb91ea" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 820.052833] env[61852]: DEBUG oslo_concurrency.lockutils [req-a01efe60-6eb4-4da5-ba59-6fbedd381461 req-2cc81877-950f-47f1-b83d-13e9381fbabb service nova] Acquired lock "refresh_cache-0ec1210f-7d42-4b71-abdc-9f818ffb91ea" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.052833] env[61852]: DEBUG nova.network.neutron [req-a01efe60-6eb4-4da5-ba59-6fbedd381461 req-2cc81877-950f-47f1-b83d-13e9381fbabb service nova] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Refreshing network info cache for port 72d13320-e518-4f1a-98b0-cb48bcb2fe11 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 820.086726] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33117f5f-1f5b-4ef7-81fd-353c9ca0821a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.105670] env[61852]: DEBUG oslo_vmware.api [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1292845, 'name': RemoveSnapshot_Task, 'duration_secs': 1.020628} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.106015] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Deleted Snapshot of the VM instance {{(pid=61852) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 820.106254] env[61852]: INFO nova.compute.manager [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Took 16.70 seconds to snapshot the instance on the hypervisor. 
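[editor's note] The RemoveSnapshot_Task, CopyVirtualDisk_Task and DeleteDatastoreFile_Task entries above all follow the same oslo.vmware pattern: the driver invokes a vSphere *_Task method through the API session, then blocks in wait_for_task() while _poll_task logs the progress percentages seen throughout this log. A minimal sketch of that pattern, assuming oslo.vmware's public VMwareAPISession API; the host, credentials and dc_ref below are placeholders, not values from this deployment:

    from oslo_vmware import api

    # Placeholder endpoint/credentials; poll tasks every 0.5s, retry API
    # calls up to 10 times (the session logs "Logging into host" as above).
    session = api.VMwareAPISession(
        'vcenter.example.com', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # FileManager is a singleton managed object on the service content.
    file_manager = session.vim.service_content.fileManager

    # Start the asynchronous server-side task (mirrors task-1292848 above);
    # dc_ref is a Datacenter managed-object reference obtained elsewhere.
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore2] c94066d5-2e5f-4059-bdc5-385d517f1d84',
        datacenter=dc_ref)

    # Blocks, polling at task_poll_interval until the task reaches
    # 'success' (returning its TaskInfo) or raising on 'error'.
    task_info = session.wait_for_task(task)

The "Waiting for the task: (returnval){ ... }" blocks in the log are wait_for_task() echoing the task reference it is polling.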
[ 820.109163] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ae4326-28fe-482f-85ac-03d15c2acc78 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.149457] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec8850da-8591-4829-a77e-b9e1df7ed1d1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.160195] env[61852]: DEBUG oslo_vmware.api [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Task: {'id': task-1292847, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.622696} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.163232] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 0ec1210f-7d42-4b71-abdc-9f818ffb91ea/0ec1210f-7d42-4b71-abdc-9f818ffb91ea.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 820.163505] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 820.164734] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcf552c4-0cbd-4f9b-9007-2c288565e736 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.170914] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-64a48ddb-1eed-4953-9d45-55ba02269cf3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.183399] env[61852]: DEBUG nova.compute.provider_tree [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 820.189921] env[61852]: DEBUG oslo_vmware.api [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52fe60ab-28dd-16f8-547c-b278b631a290, 'name': SearchDatastore_Task, 'duration_secs': 0.05047} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.190261] env[61852]: DEBUG oslo_vmware.api [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Waiting for the task: (returnval){ [ 820.190261] env[61852]: value = "task-1292849" [ 820.190261] env[61852]: _type = "Task" [ 820.190261] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.190767] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.191065] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 8d733f93-7636-447b-a5d5-53c16c30061f/90fd8f39-16b3-43e0-a682-0ec131005e31-rescue.vmdk. {{(pid=61852) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 820.191384] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f739f82b-1f3e-4d81-9909-7d67b7c61e26 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.204993] env[61852]: DEBUG oslo_vmware.api [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Task: {'id': task-1292849, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.209711] env[61852]: DEBUG oslo_vmware.api [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 820.209711] env[61852]: value = "task-1292850" [ 820.209711] env[61852]: _type = "Task" [ 820.209711] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.218682] env[61852]: DEBUG oslo_vmware.api [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1292850, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.325719] env[61852]: DEBUG oslo_vmware.api [None req-24c02fd9-3ce2-424c-b2ab-decf9a8e3beb tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1292848, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.175865} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.326012] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-24c02fd9-3ce2-424c-b2ab-decf9a8e3beb tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 820.326214] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-24c02fd9-3ce2-424c-b2ab-decf9a8e3beb tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Deleted contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 820.326440] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-24c02fd9-3ce2-424c-b2ab-decf9a8e3beb tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 820.326620] env[61852]: INFO nova.compute.manager [None req-24c02fd9-3ce2-424c-b2ab-decf9a8e3beb tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Took 1.58 seconds to destroy the instance on the hypervisor. [ 820.326869] env[61852]: DEBUG oslo.service.loopingcall [None req-24c02fd9-3ce2-424c-b2ab-decf9a8e3beb tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 820.327075] env[61852]: DEBUG nova.compute.manager [-] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 820.327195] env[61852]: DEBUG nova.network.neutron [-] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 820.398937] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Releasing lock "refresh_cache-89970cff-cb49-4803-81a5-1675b0ea4aaf" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.399309] env[61852]: DEBUG nova.compute.manager [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Instance network_info: |[{"id": "145feb94-c188-4d2a-a614-870d122d1174", "address": "fa:16:3e:b9:ea:56", "network": {"id": "50fe42e7-c56b-4943-ae54-bab5c794e094", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-195787110-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6a9b0e24a2545cf877ccef6701fcac6", "mtu": 
8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d62c1cf-f39a-4626-9552-f1e13c692636", "external-id": "nsx-vlan-transportzone-748", "segmentation_id": 748, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap145feb94-c1", "ovs_interfaceid": "145feb94-c188-4d2a-a614-870d122d1174", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 820.399771] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:ea:56', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d62c1cf-f39a-4626-9552-f1e13c692636', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '145feb94-c188-4d2a-a614-870d122d1174', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 820.412945] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Creating folder: Project (f6a9b0e24a2545cf877ccef6701fcac6). Parent ref: group-v277280. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 820.414759] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b2cf13b6-cd33-43f8-a67c-e532d4199d0b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.428539] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Created folder: Project (f6a9b0e24a2545cf877ccef6701fcac6) in parent group-v277280. [ 820.428995] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Creating folder: Instances. Parent ref: group-v277333. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 820.429300] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c20cbc46-1475-44d0-8c4c-a0390839839e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.440848] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Created folder: Instances in parent group-v277333. [ 820.441168] env[61852]: DEBUG oslo.service.loopingcall [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 820.441544] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 820.441895] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cf68383a-113a-42ce-a744-9b65ac60ace2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.474215] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 820.474215] env[61852]: value = "task-1292853" [ 820.474215] env[61852]: _type = "Task" [ 820.474215] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.484164] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292853, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.568448] env[61852]: DEBUG nova.network.neutron [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Updating instance_info_cache with network_info: [{"id": "819604bb-f7cf-449f-8681-bf4901e756e1", "address": "fa:16:3e:67:fb:6d", "network": {"id": "6af5ce9e-58e2-42ba-a351-b29430e2a962", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-712942607", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.225", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0c7f48c684044564b9081d6bc04c7e29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap819604bb-f7", "ovs_interfaceid": "819604bb-f7cf-449f-8681-bf4901e756e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "92411dba-21d2-474e-9b4f-cda4bea94122", "address": "fa:16:3e:45:5c:3f", "network": {"id": "14fc2af1-19a4-4596-8235-9cc98dac9d76", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1810972316", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.240", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "0c7f48c684044564b9081d6bc04c7e29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d054505-89d3-49c5-8b38-5da917a42c49", "external-id": "nsx-vlan-transportzone-888", "segmentation_id": 888, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92411dba-21", "ovs_interfaceid": 
"92411dba-21d2-474e-9b4f-cda4bea94122", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8dd44cd3-3b97-484e-b3ed-ddb88f224343", "address": "fa:16:3e:16:81:8f", "network": {"id": "6af5ce9e-58e2-42ba-a351-b29430e2a962", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-712942607", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.21", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0c7f48c684044564b9081d6bc04c7e29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8dd44cd3-3b", "ovs_interfaceid": "8dd44cd3-3b97-484e-b3ed-ddb88f224343", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.613018] env[61852]: DEBUG nova.compute.manager [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Instance disappeared during snapshot {{(pid=61852) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4494}} [ 820.626066] env[61852]: DEBUG nova.compute.manager [None req-94bed046-3efe-49f3-a839-465e1c2b83bd tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Image not found during clean up fb0a83e2-a324-456f-8eb2-051907376bce {{(pid=61852) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4500}} [ 820.692130] env[61852]: DEBUG nova.scheduler.client.report [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 820.707221] env[61852]: DEBUG oslo_vmware.api [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Task: {'id': task-1292849, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092813} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.711864] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 820.713495] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82d64c58-d864-4eba-89fe-53a2ac58eec2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.724783] env[61852]: DEBUG nova.compute.manager [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 820.727938] env[61852]: DEBUG oslo_vmware.api [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1292850, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.510493} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.739889] env[61852]: INFO nova.virt.vmwareapi.ds_util [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 8d733f93-7636-447b-a5d5-53c16c30061f/90fd8f39-16b3-43e0-a682-0ec131005e31-rescue.vmdk. 
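[editor's note] The "Inventory has not changed" report entry above shows how allocation ratios inflate schedulable capacity: placement exposes (total - reserved) * allocation_ratio per resource class, so this host advertises 192 schedulable VCPUs from 48 physical ones. A small worked sketch using the exact figures from that entry (the formula is placement's usual capacity calculation; the dict below just restates the logged inventory):

    # Figures copied from the scheduler report entry above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        # Effective capacity seen by the scheduler.
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:g} schedulable")
    # -> VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400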
[ 820.750259] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] 0ec1210f-7d42-4b71-abdc-9f818ffb91ea/0ec1210f-7d42-4b71-abdc-9f818ffb91ea.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 820.751192] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9f73b2f-cd36-438b-b36d-2c84ceb4df26 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.754739] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5afe4e06-360d-4f79-917f-8079be34aad9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.796494] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] 8d733f93-7636-447b-a5d5-53c16c30061f/90fd8f39-16b3-43e0-a682-0ec131005e31-rescue.vmdk or device None with type thin {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 820.799117] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 820.799398] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 820.799564] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 820.799757] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Flavor pref 0:0:0 {{(pid=61852) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 820.799943] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 820.800150] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 820.800369] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 820.800484] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 820.800677] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 820.800838] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 820.801031] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 820.802711] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d911e01-4ad3-4cef-a2a9-586d11787bf3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.816673] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-108b73e4-3e54-4ef7-8b8f-441422c58f28 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.820009] env[61852]: DEBUG oslo_vmware.api [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Waiting for the task: (returnval){ [ 820.820009] env[61852]: value = "task-1292854" [ 820.820009] env[61852]: _type = "Task" [ 820.820009] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.834715] env[61852]: DEBUG oslo_vmware.api [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 820.834715] env[61852]: value = "task-1292855" [ 820.834715] env[61852]: _type = "Task" [ 820.834715] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.836693] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db65ba1c-1a42-44fb-8727-213220fee321 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.847587] env[61852]: DEBUG oslo_vmware.api [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Task: {'id': task-1292854, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.864378] env[61852]: DEBUG oslo_vmware.api [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1292855, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.973707] env[61852]: DEBUG nova.network.neutron [req-a01efe60-6eb4-4da5-ba59-6fbedd381461 req-2cc81877-950f-47f1-b83d-13e9381fbabb service nova] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Updated VIF entry in instance network info cache for port 72d13320-e518-4f1a-98b0-cb48bcb2fe11. 
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 820.974190] env[61852]: DEBUG nova.network.neutron [req-a01efe60-6eb4-4da5-ba59-6fbedd381461 req-2cc81877-950f-47f1-b83d-13e9381fbabb service nova] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Updating instance_info_cache with network_info: [{"id": "72d13320-e518-4f1a-98b0-cb48bcb2fe11", "address": "fa:16:3e:74:13:9d", "network": {"id": "dc45adde-e4fb-4495-a4c3-3373c99a2eb7", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1925353850-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d65efc960c14799bcf1b26ecdf9c912", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72d13320-e5", "ovs_interfaceid": "72d13320-e518-4f1a-98b0-cb48bcb2fe11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.986715] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292853, 'name': CreateVM_Task, 'duration_secs': 0.37083} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.987081] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 820.987478] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 820.987670] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.988061] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 820.988579] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fe0f421-6470-47bf-aa9c-519ed6e2f4ef {{(pid=61852) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.994602] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for the task: (returnval){ [ 820.994602] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5270db75-e79e-629c-15d1-04139d025d2d" [ 820.994602] env[61852]: _type = "Task" [ 820.994602] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.004733] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5270db75-e79e-629c-15d1-04139d025d2d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.071137] env[61852]: DEBUG oslo_concurrency.lockutils [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Releasing lock "refresh_cache-e265a4be-7b37-40b5-a199-42a7cd945f66" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 821.071565] env[61852]: DEBUG nova.compute.manager [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Instance network_info: |[{"id": "819604bb-f7cf-449f-8681-bf4901e756e1", "address": "fa:16:3e:67:fb:6d", "network": {"id": "6af5ce9e-58e2-42ba-a351-b29430e2a962", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-712942607", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.225", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0c7f48c684044564b9081d6bc04c7e29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap819604bb-f7", "ovs_interfaceid": "819604bb-f7cf-449f-8681-bf4901e756e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "92411dba-21d2-474e-9b4f-cda4bea94122", "address": "fa:16:3e:45:5c:3f", "network": {"id": "14fc2af1-19a4-4596-8235-9cc98dac9d76", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1810972316", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.240", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "0c7f48c684044564b9081d6bc04c7e29", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d054505-89d3-49c5-8b38-5da917a42c49", "external-id": "nsx-vlan-transportzone-888", "segmentation_id": 888, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92411dba-21", "ovs_interfaceid": "92411dba-21d2-474e-9b4f-cda4bea94122", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8dd44cd3-3b97-484e-b3ed-ddb88f224343", "address": "fa:16:3e:16:81:8f", "network": {"id": "6af5ce9e-58e2-42ba-a351-b29430e2a962", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-712942607", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.21", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0c7f48c684044564b9081d6bc04c7e29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8dd44cd3-3b", "ovs_interfaceid": "8dd44cd3-3b97-484e-b3ed-ddb88f224343", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 821.071893] env[61852]: DEBUG oslo_concurrency.lockutils [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] Acquired lock "refresh_cache-e265a4be-7b37-40b5-a199-42a7cd945f66" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.072093] env[61852]: DEBUG nova.network.neutron [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Refreshing network info cache for port 819604bb-f7cf-449f-8681-bf4901e756e1 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 821.073435] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:fb:6d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51876cd6-d373-4edc-8595-254e5d631378', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '819604bb-f7cf-449f-8681-bf4901e756e1', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:5c:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d054505-89d3-49c5-8b38-5da917a42c49', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '92411dba-21d2-474e-9b4f-cda4bea94122', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:81:8f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51876cd6-d373-4edc-8595-254e5d631378', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'8dd44cd3-3b97-484e-b3ed-ddb88f224343', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 821.084258] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Creating folder: Project (0c7f48c684044564b9081d6bc04c7e29). Parent ref: group-v277280. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 821.085374] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b2a079a1-a744-4d4c-a01b-8ded122ab22f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.096716] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Created folder: Project (0c7f48c684044564b9081d6bc04c7e29) in parent group-v277280. [ 821.097246] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Creating folder: Instances. Parent ref: group-v277336. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 821.097246] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-56e2bc39-f9c7-4da3-9546-e2ada0ed3419 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.106945] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Created folder: Instances in parent group-v277336. [ 821.107236] env[61852]: DEBUG oslo.service.loopingcall [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 821.107441] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 821.107681] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-12102e7b-f0c3-4e11-bfaf-4df459abc419 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.133876] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 821.133876] env[61852]: value = "task-1292858" [ 821.133876] env[61852]: _type = "Task" [ 821.133876] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.142524] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292858, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.171700] env[61852]: DEBUG oslo_vmware.rw_handles [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c91179-7a8f-94d6-4e1e-2200793b25ba/disk-0.vmdk. {{(pid=61852) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 821.172690] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-954fad8b-82cd-4d52-b25d-c7c0dbe24ee7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.178730] env[61852]: DEBUG oslo_vmware.rw_handles [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c91179-7a8f-94d6-4e1e-2200793b25ba/disk-0.vmdk is in state: ready. {{(pid=61852) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 821.178903] env[61852]: ERROR oslo_vmware.rw_handles [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c91179-7a8f-94d6-4e1e-2200793b25ba/disk-0.vmdk due to incomplete transfer. [ 821.179152] env[61852]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-40396b32-b8c9-4304-96f4-9f1db6f40d90 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.188989] env[61852]: DEBUG oslo_vmware.rw_handles [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c91179-7a8f-94d6-4e1e-2200793b25ba/disk-0.vmdk. 
{{(pid=61852) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 821.188989] env[61852]: DEBUG nova.virt.vmwareapi.images [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Uploaded image e7fa1b16-39e9-4667-b21a-b76c52e00146 to the Glance image server {{(pid=61852) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 821.188989] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Destroying the VM {{(pid=61852) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 821.188989] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-7205ba9f-20ca-4612-9fa5-c48394121b13 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.195409] env[61852]: DEBUG oslo_vmware.api [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){ [ 821.195409] env[61852]: value = "task-1292859" [ 821.195409] env[61852]: _type = "Task" [ 821.195409] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.205047] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.509s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.205630] env[61852]: DEBUG nova.compute.manager [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 821.208201] env[61852]: DEBUG oslo_vmware.api [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292859, 'name': Destroy_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.208726] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8c2dca9f-23cf-4e8d-ad71-1d011fd28ed2 tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.971s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 821.208920] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8c2dca9f-23cf-4e8d-ad71-1d011fd28ed2 tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.210865] env[61852]: DEBUG oslo_concurrency.lockutils [None req-30479fef-9b5a-4dd8-815e-3208f924c582 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.522s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 821.211122] env[61852]: DEBUG oslo_concurrency.lockutils [None req-30479fef-9b5a-4dd8-815e-3208f924c582 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.212850] env[61852]: DEBUG oslo_concurrency.lockutils [None req-bbf093e5-76a9-4051-9aaa-4789b7ebb7c2 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.226s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 821.213063] env[61852]: DEBUG oslo_concurrency.lockutils [None req-bbf093e5-76a9-4051-9aaa-4789b7ebb7c2 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.214698] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.220s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 821.216142] env[61852]: INFO nova.compute.claims [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 821.251327] env[61852]: INFO nova.scheduler.client.report [None req-8c2dca9f-23cf-4e8d-ad71-1d011fd28ed2 
tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Deleted allocations for instance 46ccab1f-b7af-49df-a38d-af1fa3bac486 [ 821.259140] env[61852]: INFO nova.scheduler.client.report [None req-bbf093e5-76a9-4051-9aaa-4789b7ebb7c2 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Deleted allocations for instance d48cefda-0b05-4ec0-8c1d-bc25cd491faf [ 821.285972] env[61852]: INFO nova.scheduler.client.report [None req-30479fef-9b5a-4dd8-815e-3208f924c582 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Deleted allocations for instance cb50d964-5c0e-4cf3-b652-0f7b7a488f91 [ 821.329589] env[61852]: DEBUG nova.network.neutron [-] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.336576] env[61852]: DEBUG oslo_vmware.api [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Task: {'id': task-1292854, 'name': ReconfigVM_Task, 'duration_secs': 0.368259} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.337146] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Reconfigured VM instance instance-0000003e to attach disk [datastore1] 0ec1210f-7d42-4b71-abdc-9f818ffb91ea/0ec1210f-7d42-4b71-abdc-9f818ffb91ea.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 821.338010] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b3fb8fed-223b-41ff-b4d8-f7c8fe3522e4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.347640] env[61852]: DEBUG oslo_vmware.api [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Waiting for the task: (returnval){ [ 821.347640] env[61852]: value = "task-1292860" [ 821.347640] env[61852]: _type = "Task" [ 821.347640] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.358498] env[61852]: DEBUG oslo_vmware.api [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1292855, 'name': ReconfigVM_Task, 'duration_secs': 0.380832} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.359405] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Reconfigured VM instance instance-0000003c to attach disk [datastore1] 8d733f93-7636-447b-a5d5-53c16c30061f/90fd8f39-16b3-43e0-a682-0ec131005e31-rescue.vmdk or device None with type thin {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 821.360794] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-763aa2f3-8d17-49d4-969c-148b39603de3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.368434] env[61852]: DEBUG oslo_vmware.api [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Task: {'id': task-1292860, 'name': Rename_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.396121] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b1fb0062-aa9d-4e57-a699-fd658723bad4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.412494] env[61852]: DEBUG oslo_vmware.api [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 821.412494] env[61852]: value = "task-1292861" [ 821.412494] env[61852]: _type = "Task" [ 821.412494] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.422939] env[61852]: DEBUG oslo_vmware.api [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1292861, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.480729] env[61852]: DEBUG oslo_concurrency.lockutils [req-a01efe60-6eb4-4da5-ba59-6fbedd381461 req-2cc81877-950f-47f1-b83d-13e9381fbabb service nova] Releasing lock "refresh_cache-0ec1210f-7d42-4b71-abdc-9f818ffb91ea" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 821.481099] env[61852]: DEBUG nova.compute.manager [req-a01efe60-6eb4-4da5-ba59-6fbedd381461 req-2cc81877-950f-47f1-b83d-13e9381fbabb service nova] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Received event network-vif-plugged-8dd44cd3-3b97-484e-b3ed-ddb88f224343 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 821.481332] env[61852]: DEBUG oslo_concurrency.lockutils [req-a01efe60-6eb4-4da5-ba59-6fbedd381461 req-2cc81877-950f-47f1-b83d-13e9381fbabb service nova] Acquiring lock "e265a4be-7b37-40b5-a199-42a7cd945f66-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 821.481561] env[61852]: DEBUG oslo_concurrency.lockutils [req-a01efe60-6eb4-4da5-ba59-6fbedd381461 req-2cc81877-950f-47f1-b83d-13e9381fbabb service nova] Lock "e265a4be-7b37-40b5-a199-42a7cd945f66-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 821.481744] env[61852]: DEBUG oslo_concurrency.lockutils [req-a01efe60-6eb4-4da5-ba59-6fbedd381461 req-2cc81877-950f-47f1-b83d-13e9381fbabb service nova] Lock "e265a4be-7b37-40b5-a199-42a7cd945f66-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.481943] env[61852]: DEBUG nova.compute.manager [req-a01efe60-6eb4-4da5-ba59-6fbedd381461 req-2cc81877-950f-47f1-b83d-13e9381fbabb service nova] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] No waiting events found dispatching network-vif-plugged-8dd44cd3-3b97-484e-b3ed-ddb88f224343 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 821.482135] env[61852]: WARNING nova.compute.manager [req-a01efe60-6eb4-4da5-ba59-6fbedd381461 req-2cc81877-950f-47f1-b83d-13e9381fbabb service nova] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Received unexpected event network-vif-plugged-8dd44cd3-3b97-484e-b3ed-ddb88f224343 for instance with vm_state building and task_state spawning. [ 821.482429] env[61852]: DEBUG nova.compute.manager [req-a01efe60-6eb4-4da5-ba59-6fbedd381461 req-2cc81877-950f-47f1-b83d-13e9381fbabb service nova] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Received event network-changed-8dd44cd3-3b97-484e-b3ed-ddb88f224343 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 821.482551] env[61852]: DEBUG nova.compute.manager [req-a01efe60-6eb4-4da5-ba59-6fbedd381461 req-2cc81877-950f-47f1-b83d-13e9381fbabb service nova] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Refreshing instance network info cache due to event network-changed-8dd44cd3-3b97-484e-b3ed-ddb88f224343. 
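The sequence above (acquire the per-instance "-events" lock, pop_instance_event, "No waiting events found", then the WARNING about an unexpected event) is the external-event dispatch pattern: Neutron reports network-vif-plugged, and Nova either wakes a registered waiter or records the event as unexpected because the instance is still building. A simplified model of that waiter table; class and field names here are illustrative, not Nova's internals:

```python
import threading
from collections import defaultdict

class InstanceEvents:
    """Toy model of the waiter table behind pop_instance_event."""

    def __init__(self):
        self._lock = threading.Lock()
        # instance uuid -> event name -> threading.Event waiter
        self._waiters = defaultdict(dict)

    def prepare(self, instance_uuid, event_name):
        """Register a waiter before triggering the external action."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = waiter
        return waiter

    def dispatch(self, instance_uuid, event_name):
        """Deliver an external event to its waiter, if any."""
        with self._lock:
            waiter = self._waiters[instance_uuid].pop(event_name, None)
        if waiter is None:
            # Corresponds to the WARNING above: the event arrived while
            # the instance was still building and nobody was waiting.
            print(f'unexpected event {event_name} for {instance_uuid}')
        else:
            waiter.set()
```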
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 821.482744] env[61852]: DEBUG oslo_concurrency.lockutils [req-a01efe60-6eb4-4da5-ba59-6fbedd381461 req-2cc81877-950f-47f1-b83d-13e9381fbabb service nova] Acquiring lock "refresh_cache-e265a4be-7b37-40b5-a199-42a7cd945f66" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 821.506110] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5270db75-e79e-629c-15d1-04139d025d2d, 'name': SearchDatastore_Task, 'duration_secs': 0.017663} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.506478] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 821.506688] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 821.506971] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 821.507164] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.507353] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 821.508761] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95c55f11-ee34-4573-ab87-6b637e53394e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.519836] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 821.520101] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 821.522017] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1741607-002f-46d8-ac44-36e22d3c6ded {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.526732] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for the task: (returnval){ [ 821.526732] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5296ec7d-08eb-5383-7b69-583f404e4a37" [ 821.526732] env[61852]: _type = "Task" [ 821.526732] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.536378] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5296ec7d-08eb-5383-7b69-583f404e4a37, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.648180] env[61852]: DEBUG nova.network.neutron [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Successfully updated port: d1e216a2-48ce-4945-8024-f78b3701fd65 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 821.648180] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292858, 'name': CreateVM_Task} progress is 99%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.705201] env[61852]: DEBUG oslo_vmware.api [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292859, 'name': Destroy_Task, 'duration_secs': 0.360993} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.705201] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Destroyed the VM [ 821.705412] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Deleting Snapshot of the VM instance {{(pid=61852) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 821.705828] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5e40c091-a6cb-4601-9766-83c2a48521f2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.710689] env[61852]: DEBUG nova.compute.utils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 821.713581] env[61852]: DEBUG nova.compute.manager [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 821.713581] env[61852]: DEBUG nova.network.neutron [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 821.717186] env[61852]: DEBUG oslo_vmware.api [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){ [ 821.717186] env[61852]: value = "task-1292862" [ 821.717186] env[61852]: _type = "Task" [ 821.717186] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.729515] env[61852]: DEBUG oslo_vmware.api [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292862, 'name': RemoveSnapshot_Task} progress is 0%. 
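The "Using /dev/sd instead of None" entry above is Nova's device-name picker falling back to a /dev/sd prefix when the block-device request does not name one. A rough sketch of that fallback; the helper name and inputs are assumptions for illustration, and real Nova additionally handles multi-letter suffixes and per-hypervisor prefixes:

```python
def next_device_name(existing, prefix=None):
    """Pick the next free /dev/sdX name.

    Mirrors the "Using /dev/sd instead of None" fallback above: when no
    prefix is requested, /dev/sd is assumed. Deliberately simplified.
    """
    if prefix is None:
        prefix = '/dev/sd'  # the fallback logged above
    used = {name[len(prefix):] for name in existing if name.startswith(prefix)}
    for letter in 'abcdefghijklmnopqrstuvwxyz':
        if letter not in used:
            return prefix + letter
    raise ValueError('no free device names under %s' % prefix)

# e.g. next_device_name(['/dev/sda', '/dev/sdb']) -> '/dev/sdc'
```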
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.760156] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8c2dca9f-23cf-4e8d-ad71-1d011fd28ed2 tempest-ServerAddressesNegativeTestJSON-590678596 tempest-ServerAddressesNegativeTestJSON-590678596-project-member] Lock "46ccab1f-b7af-49df-a38d-af1fa3bac486" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.491s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.768171] env[61852]: DEBUG oslo_concurrency.lockutils [None req-bbf093e5-76a9-4051-9aaa-4789b7ebb7c2 tempest-ServersAdminTestJSON-1466837219 tempest-ServersAdminTestJSON-1466837219-project-member] Lock "d48cefda-0b05-4ec0-8c1d-bc25cd491faf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.657s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.793402] env[61852]: DEBUG oslo_concurrency.lockutils [None req-30479fef-9b5a-4dd8-815e-3208f924c582 tempest-ServerTagsTestJSON-168010359 tempest-ServerTagsTestJSON-168010359-project-member] Lock "cb50d964-5c0e-4cf3-b652-0f7b7a488f91" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.005s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.837746] env[61852]: INFO nova.compute.manager [-] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Took 1.51 seconds to deallocate network for instance. [ 821.859406] env[61852]: DEBUG oslo_vmware.api [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Task: {'id': task-1292860, 'name': Rename_Task, 'duration_secs': 0.155799} completed successfully. 
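The hold and wait times above ("held 28.491s" here, "waited 24.971s" earlier for "compute_resources") reflect coarse-grained serialization: claims, usage updates, and terminations all funnel through one named semaphore, so concurrent tempest builds queue behind each other. A minimal illustration with oslo.concurrency; the decorator usage is the real library API, while the function body is hypothetical:

```python
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage(tracker, instance):
    # Only one thread at a time may run any function guarded by the
    # 'compute_resources' name, keeping inventory reads and writes
    # consistent -- at the cost of the multi-second waits logged above.
    tracker.claimed_mb += instance['memory_mb']  # hypothetical fields
```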
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.859684] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 821.859934] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-af1e02e1-bb16-4af5-8593-72881152a73b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.868278] env[61852]: DEBUG nova.policy [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c4d6297b7026476c9753affb38106e9e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f6a9b0e24a2545cf877ccef6701fcac6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 821.873816] env[61852]: DEBUG oslo_vmware.api [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Waiting for the task: (returnval){ [ 821.873816] env[61852]: value = "task-1292863" [ 821.873816] env[61852]: _type = "Task" [ 821.873816] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.885359] env[61852]: DEBUG oslo_vmware.api [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Task: {'id': task-1292863, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.925454] env[61852]: DEBUG oslo_vmware.api [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1292861, 'name': ReconfigVM_Task, 'duration_secs': 0.176968} completed successfully. 
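The "Policy check for network:attach_external_network failed" entry above is oslo.policy enforcement: the caller's credential dict (roles, project, scope) is evaluated against a registered rule, and these member-role credentials fail an admin-only check. A hedged sketch of that evaluation; the rule string and setup are assumptions for illustration, not Nova's actual policy file:

```python
from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
# Assumed rule for illustration: attaching an external network is
# admin-only, which is why the member-role credentials above fail.
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

creds = {'roles': ['reader', 'member'],
         'project_id': 'f6a9b0e24a2545cf877ccef6701fcac6'}
allowed = enforcer.authorize('network:attach_external_network',
                             {}, creds, do_raise=False)
print(allowed)  # False for the non-admin credentials logged above
```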
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.925786] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 821.926056] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a21d22a0-f3f5-493f-ad7d-52dc694df804 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.932462] env[61852]: DEBUG oslo_vmware.api [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 821.932462] env[61852]: value = "task-1292864" [ 821.932462] env[61852]: _type = "Task" [ 821.932462] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.940315] env[61852]: DEBUG oslo_vmware.api [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1292864, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.989035] env[61852]: DEBUG nova.network.neutron [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Updated VIF entry in instance network info cache for port 819604bb-f7cf-449f-8681-bf4901e756e1. 
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 821.989711] env[61852]: DEBUG nova.network.neutron [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Updating instance_info_cache with network_info: [{"id": "819604bb-f7cf-449f-8681-bf4901e756e1", "address": "fa:16:3e:67:fb:6d", "network": {"id": "6af5ce9e-58e2-42ba-a351-b29430e2a962", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-712942607", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.225", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0c7f48c684044564b9081d6bc04c7e29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap819604bb-f7", "ovs_interfaceid": "819604bb-f7cf-449f-8681-bf4901e756e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "92411dba-21d2-474e-9b4f-cda4bea94122", "address": "fa:16:3e:45:5c:3f", "network": {"id": "14fc2af1-19a4-4596-8235-9cc98dac9d76", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1810972316", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.240", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "0c7f48c684044564b9081d6bc04c7e29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d054505-89d3-49c5-8b38-5da917a42c49", "external-id": "nsx-vlan-transportzone-888", "segmentation_id": 888, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92411dba-21", "ovs_interfaceid": "92411dba-21d2-474e-9b4f-cda4bea94122", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8dd44cd3-3b97-484e-b3ed-ddb88f224343", "address": "fa:16:3e:16:81:8f", "network": {"id": "6af5ce9e-58e2-42ba-a351-b29430e2a962", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-712942607", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.21", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0c7f48c684044564b9081d6bc04c7e29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", 
"segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8dd44cd3-3b", "ovs_interfaceid": "8dd44cd3-3b97-484e-b3ed-ddb88f224343", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.042622] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5296ec7d-08eb-5383-7b69-583f404e4a37, 'name': SearchDatastore_Task, 'duration_secs': 0.015939} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.042622] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65ed30a9-7ac1-4790-a289-3b0a0567c21b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.046070] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for the task: (returnval){ [ 822.046070] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]522b3c72-738f-7dc9-7546-84029a4d65f3" [ 822.046070] env[61852]: _type = "Task" [ 822.046070] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.055031] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]522b3c72-738f-7dc9-7546-84029a4d65f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.127107] env[61852]: DEBUG nova.compute.manager [req-fd8cfd00-4430-475a-856c-7a0fbd3fa050 req-a7fd919f-6a78-49b0-9491-f2a6b8f817a8 service nova] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Received event network-vif-deleted-241fcec0-c8cc-40b0-bfbc-becb93226dc0 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 822.145362] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292858, 'name': CreateVM_Task, 'duration_secs': 0.544516} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.145553] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 822.147118] env[61852]: DEBUG oslo_concurrency.lockutils [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 822.147118] env[61852]: DEBUG oslo_concurrency.lockutils [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.147559] env[61852]: DEBUG oslo_concurrency.lockutils [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 822.147652] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61b99044-f2b9-4f02-b12a-c778a2621947 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.151432] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquiring lock "refresh_cache-f8ebb1b7-39c6-486e-ab25-23080d858846" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 822.151432] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquired lock "refresh_cache-f8ebb1b7-39c6-486e-ab25-23080d858846" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.151542] env[61852]: DEBUG nova.network.neutron [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 822.153527] env[61852]: DEBUG nova.compute.manager [req-6e65fb7d-df0d-40fd-af68-75a5d943fcf5 req-38436ced-e161-4af9-a6bc-896ce0d9dfc7 service nova] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Received event network-vif-plugged-d1e216a2-48ce-4945-8024-f78b3701fd65 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 822.153918] env[61852]: DEBUG oslo_concurrency.lockutils [req-6e65fb7d-df0d-40fd-af68-75a5d943fcf5 req-38436ced-e161-4af9-a6bc-896ce0d9dfc7 service nova] Acquiring lock "f8ebb1b7-39c6-486e-ab25-23080d858846-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.153956] env[61852]: DEBUG oslo_concurrency.lockutils [req-6e65fb7d-df0d-40fd-af68-75a5d943fcf5 req-38436ced-e161-4af9-a6bc-896ce0d9dfc7 service nova] Lock "f8ebb1b7-39c6-486e-ab25-23080d858846-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.154122] env[61852]: DEBUG oslo_concurrency.lockutils [req-6e65fb7d-df0d-40fd-af68-75a5d943fcf5 req-38436ced-e161-4af9-a6bc-896ce0d9dfc7 service nova] Lock "f8ebb1b7-39c6-486e-ab25-23080d858846-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.154318] env[61852]: DEBUG nova.compute.manager [req-6e65fb7d-df0d-40fd-af68-75a5d943fcf5 req-38436ced-e161-4af9-a6bc-896ce0d9dfc7 service nova] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] No waiting events found dispatching network-vif-plugged-d1e216a2-48ce-4945-8024-f78b3701fd65 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 822.154469] env[61852]: WARNING nova.compute.manager [req-6e65fb7d-df0d-40fd-af68-75a5d943fcf5 req-38436ced-e161-4af9-a6bc-896ce0d9dfc7 service nova] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Received unexpected event network-vif-plugged-d1e216a2-48ce-4945-8024-f78b3701fd65 for instance with vm_state building and task_state spawning. [ 822.154635] env[61852]: DEBUG nova.compute.manager [req-6e65fb7d-df0d-40fd-af68-75a5d943fcf5 req-38436ced-e161-4af9-a6bc-896ce0d9dfc7 service nova] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Received event network-changed-d1e216a2-48ce-4945-8024-f78b3701fd65 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 822.154794] env[61852]: DEBUG nova.compute.manager [req-6e65fb7d-df0d-40fd-af68-75a5d943fcf5 req-38436ced-e161-4af9-a6bc-896ce0d9dfc7 service nova] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Refreshing instance network info cache due to event network-changed-d1e216a2-48ce-4945-8024-f78b3701fd65. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 822.154959] env[61852]: DEBUG oslo_concurrency.lockutils [req-6e65fb7d-df0d-40fd-af68-75a5d943fcf5 req-38436ced-e161-4af9-a6bc-896ce0d9dfc7 service nova] Acquiring lock "refresh_cache-f8ebb1b7-39c6-486e-ab25-23080d858846" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 822.159125] env[61852]: DEBUG oslo_vmware.api [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Waiting for the task: (returnval){ [ 822.159125] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52e5b93c-2047-820d-1190-12cf20622030" [ 822.159125] env[61852]: _type = "Task" [ 822.159125] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.173336] env[61852]: DEBUG oslo_vmware.api [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52e5b93c-2047-820d-1190-12cf20622030, 'name': SearchDatastore_Task, 'duration_secs': 0.009298} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.173650] env[61852]: DEBUG oslo_concurrency.lockutils [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 822.173875] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 822.176015] env[61852]: DEBUG oslo_concurrency.lockutils [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 822.217106] env[61852]: DEBUG nova.compute.manager [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 822.237316] env[61852]: DEBUG oslo_vmware.api [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292862, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.345358] env[61852]: DEBUG oslo_concurrency.lockutils [None req-24c02fd9-3ce2-424c-b2ab-decf9a8e3beb tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.387142] env[61852]: DEBUG oslo_vmware.api [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Task: {'id': task-1292863, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.445755] env[61852]: DEBUG oslo_vmware.api [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1292864, 'name': PowerOnVM_Task, 'duration_secs': 0.454535} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.446042] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 822.449209] env[61852]: DEBUG nova.compute.manager [None req-6d921ba8-864e-48ff-a4d4-a21ff3775473 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 822.449596] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-195d2120-a4b1-457a-88fc-893c58d559e8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.492939] env[61852]: DEBUG oslo_concurrency.lockutils [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] Releasing lock "refresh_cache-e265a4be-7b37-40b5-a199-42a7cd945f66" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 822.493290] env[61852]: DEBUG nova.compute.manager [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Received event network-vif-plugged-92411dba-21d2-474e-9b4f-cda4bea94122 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 822.493507] env[61852]: DEBUG oslo_concurrency.lockutils [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] Acquiring lock "e265a4be-7b37-40b5-a199-42a7cd945f66-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.493723] env[61852]: DEBUG oslo_concurrency.lockutils [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] Lock "e265a4be-7b37-40b5-a199-42a7cd945f66-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.493895] env[61852]: DEBUG oslo_concurrency.lockutils [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] Lock "e265a4be-7b37-40b5-a199-42a7cd945f66-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.494072] env[61852]: DEBUG nova.compute.manager [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] 
No waiting events found dispatching network-vif-plugged-92411dba-21d2-474e-9b4f-cda4bea94122 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 822.494250] env[61852]: WARNING nova.compute.manager [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Received unexpected event network-vif-plugged-92411dba-21d2-474e-9b4f-cda4bea94122 for instance with vm_state building and task_state spawning. [ 822.494426] env[61852]: DEBUG nova.compute.manager [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Received event network-changed-92411dba-21d2-474e-9b4f-cda4bea94122 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 822.494590] env[61852]: DEBUG nova.compute.manager [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Refreshing instance network info cache due to event network-changed-92411dba-21d2-474e-9b4f-cda4bea94122. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 822.494860] env[61852]: DEBUG oslo_concurrency.lockutils [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] Acquiring lock "refresh_cache-e265a4be-7b37-40b5-a199-42a7cd945f66" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 822.494935] env[61852]: DEBUG oslo_concurrency.lockutils [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] Acquired lock "refresh_cache-e265a4be-7b37-40b5-a199-42a7cd945f66" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.495309] env[61852]: DEBUG nova.network.neutron [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Refreshing network info cache for port 92411dba-21d2-474e-9b4f-cda4bea94122 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 822.547994] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ec9d18-5878-48e6-8d2c-75fb918ef7d9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.563302] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]522b3c72-738f-7dc9-7546-84029a4d65f3, 'name': SearchDatastore_Task, 'duration_secs': 0.00966} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.565259] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 822.565545] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 89970cff-cb49-4803-81a5-1675b0ea4aaf/89970cff-cb49-4803-81a5-1675b0ea4aaf.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 822.565884] env[61852]: DEBUG oslo_concurrency.lockutils [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.566101] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 822.566317] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a35233e3-61a9-43fc-9774-23435b8f41d1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.568899] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6f96c70-e76f-422e-8a73-661ff88d3a48 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.572433] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4211ea46-7618-4888-9788-f8fd146e7515 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.607691] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d0d82c9-ffc4-4218-adbe-96f7768e6258 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.610658] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 822.611068] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 822.612147] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for the task: (returnval){ [ 822.612147] env[61852]: value = "task-1292865" [ 822.612147] env[61852]: _type = "Task" [ 822.612147] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.612317] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff72773c-9082-4503-a9a8-f6d8f8928d0a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.621403] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2023184-9d99-4e9a-a2cf-5d78f9d6cd60 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.629735] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292865, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.630237] env[61852]: DEBUG oslo_vmware.api [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Waiting for the task: (returnval){ [ 822.630237] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529df65a-4a2f-21fb-dce1-59967b0a829c" [ 822.630237] env[61852]: _type = "Task" [ 822.630237] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.641838] env[61852]: DEBUG nova.compute.provider_tree [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 822.653668] env[61852]: DEBUG oslo_vmware.api [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529df65a-4a2f-21fb-dce1-59967b0a829c, 'name': SearchDatastore_Task, 'duration_secs': 0.008494} completed successfully. 
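The ProviderTree update above carries the Placement inventory model: per resource class, a total, a reserved amount, an allocation_ratio, and unit bounds. Effective schedulable capacity is (total - reserved) * allocation_ratio, so the VCPU record above advertises (48 - 0) * 4.0 = 192 schedulable vCPUs. A tiny helper to make that arithmetic concrete:

```python
def capacity(inventory):
    """Effective capacity per resource class: (total - reserved) * ratio."""
    return {rc: (inv['total'] - inv['reserved']) * inv['allocation_ratio']
            for rc, inv in inventory.items()}

# Values taken from the inventory entry logged above.
inv = {'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
       'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
       'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0}}
print(capacity(inv))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
```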
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.653668] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6c9f06a-1be4-41de-a3b6-7ddc453c9397 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.658656] env[61852]: DEBUG oslo_vmware.api [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Waiting for the task: (returnval){ [ 822.658656] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52b7cdef-f2bd-969a-4759-9451e767e43e" [ 822.658656] env[61852]: _type = "Task" [ 822.658656] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.668667] env[61852]: DEBUG oslo_vmware.api [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52b7cdef-f2bd-969a-4759-9451e767e43e, 'name': SearchDatastore_Task, 'duration_secs': 0.008573} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.668951] env[61852]: DEBUG oslo_concurrency.lockutils [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 822.669239] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] e265a4be-7b37-40b5-a199-42a7cd945f66/e265a4be-7b37-40b5-a199-42a7cd945f66.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 822.669509] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6dfb5384-c841-48f7-ae92-01debe4849c5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.677255] env[61852]: DEBUG oslo_vmware.api [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Waiting for the task: (returnval){ [ 822.677255] env[61852]: value = "task-1292866" [ 822.677255] env[61852]: _type = "Task" [ 822.677255] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.684618] env[61852]: DEBUG oslo_vmware.api [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': task-1292866, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.715421] env[61852]: DEBUG nova.network.neutron [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 822.739478] env[61852]: DEBUG oslo_vmware.api [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292862, 'name': RemoveSnapshot_Task, 'duration_secs': 0.957873} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.739856] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Deleted Snapshot of the VM instance {{(pid=61852) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 822.740149] env[61852]: INFO nova.compute.manager [None req-6ca3864d-96bf-4fd0-b6cf-d01eafdb0f66 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Took 16.41 seconds to snapshot the instance on the hypervisor. [ 822.886914] env[61852]: DEBUG oslo_vmware.api [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Task: {'id': task-1292863, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.985093] env[61852]: DEBUG nova.network.neutron [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Updating instance_info_cache with network_info: [{"id": "d1e216a2-48ce-4945-8024-f78b3701fd65", "address": "fa:16:3e:b9:cd:b1", "network": {"id": "50fe42e7-c56b-4943-ae54-bab5c794e094", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-195787110-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6a9b0e24a2545cf877ccef6701fcac6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d62c1cf-f39a-4626-9552-f1e13c692636", "external-id": "nsx-vlan-transportzone-748", "segmentation_id": 748, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1e216a2-48", "ovs_interfaceid": "d1e216a2-48ce-4945-8024-f78b3701fd65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.135220] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292865, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478852} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.135220] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 89970cff-cb49-4803-81a5-1675b0ea4aaf/89970cff-cb49-4803-81a5-1675b0ea4aaf.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 823.135694] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 823.136020] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-efea69f5-c90e-4072-a32b-ca7aac706047 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.149387] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for the task: (returnval){ [ 823.149387] env[61852]: value = "task-1292867" [ 823.149387] env[61852]: _type = "Task" [ 823.149387] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.161320] env[61852]: DEBUG nova.network.neutron [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Successfully created port: 33def83c-31aa-4bb0-9af4-8c7657457d6f {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 823.166867] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292867, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.171749] env[61852]: ERROR nova.scheduler.client.report [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [req-e694a161-ce81-4baf-9a24-f3baab410f0a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f818062c-7b17-4bd0-94af-192a674543c3. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e694a161-ce81-4baf-9a24-f3baab410f0a"}]} [ 823.191113] env[61852]: DEBUG oslo_vmware.api [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': task-1292866, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.194698] env[61852]: DEBUG nova.scheduler.client.report [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Refreshing inventories for resource provider f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 823.214917] env[61852]: DEBUG nova.scheduler.client.report [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Updating ProviderTree inventory for provider f818062c-7b17-4bd0-94af-192a674543c3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 823.215230] env[61852]: DEBUG nova.compute.provider_tree [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 823.228846] env[61852]: DEBUG nova.scheduler.client.report [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Refreshing aggregate associations for resource provider f818062c-7b17-4bd0-94af-192a674543c3, aggregates: None {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 823.235651] env[61852]: DEBUG nova.compute.manager [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 823.261308] env[61852]: DEBUG nova.scheduler.client.report [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Refreshing trait associations for resource provider f818062c-7b17-4bd0-94af-192a674543c3, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 823.278908] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 823.279191] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 823.279670] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 823.279670] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 823.279798] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 823.282204] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 823.282204] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] 
Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 823.282204] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 823.282204] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 823.282204] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 823.282204] env[61852]: DEBUG nova.virt.hardware [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 823.282204] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af30fd1e-05f4-4c4e-82a9-b87bf6ad7b80 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.292248] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-308a6a2b-7e8a-47ac-a639-a2bdba3fe7ce {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.369387] env[61852]: DEBUG nova.network.neutron [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Updated VIF entry in instance network info cache for port 92411dba-21d2-474e-9b4f-cda4bea94122. 
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 823.370022] env[61852]: DEBUG nova.network.neutron [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Updating instance_info_cache with network_info: [{"id": "819604bb-f7cf-449f-8681-bf4901e756e1", "address": "fa:16:3e:67:fb:6d", "network": {"id": "6af5ce9e-58e2-42ba-a351-b29430e2a962", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-712942607", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.225", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0c7f48c684044564b9081d6bc04c7e29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap819604bb-f7", "ovs_interfaceid": "819604bb-f7cf-449f-8681-bf4901e756e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "92411dba-21d2-474e-9b4f-cda4bea94122", "address": "fa:16:3e:45:5c:3f", "network": {"id": "14fc2af1-19a4-4596-8235-9cc98dac9d76", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1810972316", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.240", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "0c7f48c684044564b9081d6bc04c7e29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d054505-89d3-49c5-8b38-5da917a42c49", "external-id": "nsx-vlan-transportzone-888", "segmentation_id": 888, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92411dba-21", "ovs_interfaceid": "92411dba-21d2-474e-9b4f-cda4bea94122", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8dd44cd3-3b97-484e-b3ed-ddb88f224343", "address": "fa:16:3e:16:81:8f", "network": {"id": "6af5ce9e-58e2-42ba-a351-b29430e2a962", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-712942607", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.21", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0c7f48c684044564b9081d6bc04c7e29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", 
"segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8dd44cd3-3b", "ovs_interfaceid": "8dd44cd3-3b97-484e-b3ed-ddb88f224343", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.389732] env[61852]: DEBUG oslo_vmware.api [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Task: {'id': task-1292863, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.490180] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Releasing lock "refresh_cache-f8ebb1b7-39c6-486e-ab25-23080d858846" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 823.490180] env[61852]: DEBUG nova.compute.manager [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Instance network_info: |[{"id": "d1e216a2-48ce-4945-8024-f78b3701fd65", "address": "fa:16:3e:b9:cd:b1", "network": {"id": "50fe42e7-c56b-4943-ae54-bab5c794e094", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-195787110-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6a9b0e24a2545cf877ccef6701fcac6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d62c1cf-f39a-4626-9552-f1e13c692636", "external-id": "nsx-vlan-transportzone-748", "segmentation_id": 748, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1e216a2-48", "ovs_interfaceid": "d1e216a2-48ce-4945-8024-f78b3701fd65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 823.490557] env[61852]: DEBUG oslo_concurrency.lockutils [req-6e65fb7d-df0d-40fd-af68-75a5d943fcf5 req-38436ced-e161-4af9-a6bc-896ce0d9dfc7 service nova] Acquired lock "refresh_cache-f8ebb1b7-39c6-486e-ab25-23080d858846" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.490830] env[61852]: DEBUG nova.network.neutron [req-6e65fb7d-df0d-40fd-af68-75a5d943fcf5 req-38436ced-e161-4af9-a6bc-896ce0d9dfc7 service nova] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Refreshing network info cache for port d1e216a2-48ce-4945-8024-f78b3701fd65 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 823.492449] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 
tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:cd:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d62c1cf-f39a-4626-9552-f1e13c692636', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd1e216a2-48ce-4945-8024-f78b3701fd65', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 823.501281] env[61852]: DEBUG oslo.service.loopingcall [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 823.504527] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 823.505315] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0e31320a-6727-4403-9004-de30bf08f3ba {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.534339] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 823.534339] env[61852]: value = "task-1292868" [ 823.534339] env[61852]: _type = "Task" [ 823.534339] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.544565] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292868, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.641689] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-295e20a1-5077-45b0-a93f-2daebed96b21 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.653319] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b8fdeea-15fd-486e-bdc5-7eb5869641d0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.666694] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292867, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.292781} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.694122] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 823.695406] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-742ef6c8-95cc-459d-8cd0-6d0b26fdfa8c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.702086] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c00d37aa-7d81-4204-8c29-7ecaab4e6dff {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.712107] env[61852]: DEBUG oslo_vmware.api [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': task-1292866, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.747949} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.722705] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] e265a4be-7b37-40b5-a199-42a7cd945f66/e265a4be-7b37-40b5-a199-42a7cd945f66.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 823.722705] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 823.731884] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] 89970cff-cb49-4803-81a5-1675b0ea4aaf/89970cff-cb49-4803-81a5-1675b0ea4aaf.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 823.732248] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c5ad554e-f32e-43d5-bf90-777fda076f23 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.734900] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68d525b1-bf42-4a15-bb57-69dc4d80936c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.752765] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9e091ce-b4f4-4616-8505-d493cd344c76 
{{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.768607] env[61852]: DEBUG nova.compute.provider_tree [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 823.772714] env[61852]: DEBUG oslo_vmware.api [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Waiting for the task: (returnval){ [ 823.772714] env[61852]: value = "task-1292869" [ 823.772714] env[61852]: _type = "Task" [ 823.772714] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.773822] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for the task: (returnval){ [ 823.773822] env[61852]: value = "task-1292870" [ 823.773822] env[61852]: _type = "Task" [ 823.773822] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.786212] env[61852]: DEBUG oslo_vmware.api [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': task-1292869, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.791019] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292870, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.873593] env[61852]: DEBUG oslo_concurrency.lockutils [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] Releasing lock "refresh_cache-e265a4be-7b37-40b5-a199-42a7cd945f66" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 823.877026] env[61852]: DEBUG nova.compute.manager [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Received event network-vif-plugged-145feb94-c188-4d2a-a614-870d122d1174 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 823.877026] env[61852]: DEBUG oslo_concurrency.lockutils [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] Acquiring lock "89970cff-cb49-4803-81a5-1675b0ea4aaf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.877026] env[61852]: DEBUG oslo_concurrency.lockutils [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] Lock "89970cff-cb49-4803-81a5-1675b0ea4aaf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.877026] env[61852]: DEBUG oslo_concurrency.lockutils [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] Lock "89970cff-cb49-4803-81a5-1675b0ea4aaf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.877026] env[61852]: DEBUG nova.compute.manager [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] No waiting events found dispatching network-vif-plugged-145feb94-c188-4d2a-a614-870d122d1174 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 823.877026] env[61852]: WARNING nova.compute.manager [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Received unexpected event network-vif-plugged-145feb94-c188-4d2a-a614-870d122d1174 for instance with vm_state building and task_state spawning. [ 823.877026] env[61852]: DEBUG nova.compute.manager [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Received event network-changed-145feb94-c188-4d2a-a614-870d122d1174 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 823.877026] env[61852]: DEBUG nova.compute.manager [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Refreshing instance network info cache due to event network-changed-145feb94-c188-4d2a-a614-870d122d1174. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 823.877026] env[61852]: DEBUG oslo_concurrency.lockutils [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] Acquiring lock "refresh_cache-89970cff-cb49-4803-81a5-1675b0ea4aaf" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 823.877026] env[61852]: DEBUG oslo_concurrency.lockutils [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] Acquired lock "refresh_cache-89970cff-cb49-4803-81a5-1675b0ea4aaf" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.877026] env[61852]: DEBUG nova.network.neutron [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Refreshing network info cache for port 145feb94-c188-4d2a-a614-870d122d1174 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 823.878065] env[61852]: DEBUG oslo_concurrency.lockutils [req-a01efe60-6eb4-4da5-ba59-6fbedd381461 req-2cc81877-950f-47f1-b83d-13e9381fbabb service nova] Acquired lock "refresh_cache-e265a4be-7b37-40b5-a199-42a7cd945f66" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.878920] env[61852]: DEBUG nova.network.neutron [req-a01efe60-6eb4-4da5-ba59-6fbedd381461 req-2cc81877-950f-47f1-b83d-13e9381fbabb service nova] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Refreshing network info cache for port 8dd44cd3-3b97-484e-b3ed-ddb88f224343 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 823.899092] env[61852]: DEBUG oslo_vmware.api [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Task: {'id': task-1292863, 'name': PowerOnVM_Task, 'duration_secs': 1.589823} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.899678] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 823.900463] env[61852]: INFO nova.compute.manager [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Took 8.00 seconds to spawn the instance on the hypervisor. 
[ 823.900774] env[61852]: DEBUG nova.compute.manager [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 823.901661] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f2b2ecf-f2bf-469c-a75e-87be3af4221e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 824.045096] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292868, 'name': CreateVM_Task} progress is 25%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 824.295267] env[61852]: DEBUG oslo_vmware.api [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': task-1292869, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.115659} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 824.299038] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 824.299466] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292870, 'name': ReconfigVM_Task, 'duration_secs': 0.442412} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 824.300354] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-788bf344-a82b-41db-a0f8-88f50429b6ae {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 824.304269] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Reconfigured VM instance instance-0000003f to attach disk [datastore1] 89970cff-cb49-4803-81a5-1675b0ea4aaf/89970cff-cb49-4803-81a5-1675b0ea4aaf.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 824.304786] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7aa04acc-54d3-43af-b5fd-d39926c56892 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 824.339937] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] e265a4be-7b37-40b5-a199-42a7cd945f66/e265a4be-7b37-40b5-a199-42a7cd945f66.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 824.341629] env[61852]: DEBUG nova.scheduler.client.report [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Updated inventory for provider f818062c-7b17-4bd0-94af-192a674543c3 with generation 85 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}}
[ 824.341984] env[61852]: DEBUG nova.compute.provider_tree [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Updating resource provider f818062c-7b17-4bd0-94af-192a674543c3 generation from 85 to 86 during operation: update_inventory {{(pid=61852) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}}
[ 824.342316] env[61852]: DEBUG nova.compute.provider_tree [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 824.347104] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4692f2ea-79fc-4a41-8192-3cd74a23beae {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 824.363280] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for the task: (returnval){
[ 824.363280] env[61852]: value = "task-1292871"
[ 824.363280] env[61852]: _type = "Task"
[ 824.363280] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 824.373253] env[61852]: DEBUG oslo_vmware.api [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Waiting for the task: (returnval){
[ 824.373253] env[61852]: value = "task-1292872"
[ 824.373253] env[61852]: _type = "Task"
[ 824.373253] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 824.386590] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292871, 'name': Rename_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 824.393083] env[61852]: DEBUG oslo_vmware.api [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': task-1292872, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 824.424405] env[61852]: INFO nova.compute.manager [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Took 33.92 seconds to build instance.
[ 824.512274] env[61852]: DEBUG nova.network.neutron [req-6e65fb7d-df0d-40fd-af68-75a5d943fcf5 req-38436ced-e161-4af9-a6bc-896ce0d9dfc7 service nova] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Updated VIF entry in instance network info cache for port d1e216a2-48ce-4945-8024-f78b3701fd65. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 824.512656] env[61852]: DEBUG nova.network.neutron [req-6e65fb7d-df0d-40fd-af68-75a5d943fcf5 req-38436ced-e161-4af9-a6bc-896ce0d9dfc7 service nova] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Updating instance_info_cache with network_info: [{"id": "d1e216a2-48ce-4945-8024-f78b3701fd65", "address": "fa:16:3e:b9:cd:b1", "network": {"id": "50fe42e7-c56b-4943-ae54-bab5c794e094", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-195787110-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6a9b0e24a2545cf877ccef6701fcac6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d62c1cf-f39a-4626-9552-f1e13c692636", "external-id": "nsx-vlan-transportzone-748", "segmentation_id": 748, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1e216a2-48", "ovs_interfaceid": "d1e216a2-48ce-4945-8024-f78b3701fd65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 824.549870] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292868, 'name': CreateVM_Task, 'duration_secs': 0.790349} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 824.549870] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 824.550468] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 824.550648] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 824.550979] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 824.551263] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67bd650c-03ca-4fef-91c3-8189750af6ee {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 824.557572] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for the task: (returnval){
[ 824.557572] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52fa1570-9187-dd3b-0c99-2b98b2e976f1"
[ 824.557572] env[61852]: _type = "Task"
[ 824.557572] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 824.564725] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52fa1570-9187-dd3b-0c99-2b98b2e976f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 824.865546] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.651s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 824.866099] env[61852]: DEBUG nova.compute.manager [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 824.869832] env[61852]: DEBUG nova.network.neutron [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Updated VIF entry in instance network info cache for port 145feb94-c188-4d2a-a614-870d122d1174. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 824.870229] env[61852]: DEBUG nova.network.neutron [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Updating instance_info_cache with network_info: [{"id": "145feb94-c188-4d2a-a614-870d122d1174", "address": "fa:16:3e:b9:ea:56", "network": {"id": "50fe42e7-c56b-4943-ae54-bab5c794e094", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-195787110-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6a9b0e24a2545cf877ccef6701fcac6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d62c1cf-f39a-4626-9552-f1e13c692636", "external-id": "nsx-vlan-transportzone-748", "segmentation_id": 748, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap145feb94-c1", "ovs_interfaceid": "145feb94-c188-4d2a-a614-870d122d1174", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 824.871972] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.384s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 824.873735] env[61852]: INFO nova.compute.claims [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 824.879115] env[61852]: DEBUG nova.network.neutron [req-a01efe60-6eb4-4da5-ba59-6fbedd381461 req-2cc81877-950f-47f1-b83d-13e9381fbabb service nova] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Updated VIF entry in instance network info cache for port 8dd44cd3-3b97-484e-b3ed-ddb88f224343. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 824.879624] env[61852]: DEBUG nova.network.neutron [req-a01efe60-6eb4-4da5-ba59-6fbedd381461 req-2cc81877-950f-47f1-b83d-13e9381fbabb service nova] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Updating instance_info_cache with network_info: [{"id": "819604bb-f7cf-449f-8681-bf4901e756e1", "address": "fa:16:3e:67:fb:6d", "network": {"id": "6af5ce9e-58e2-42ba-a351-b29430e2a962", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-712942607", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.225", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0c7f48c684044564b9081d6bc04c7e29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap819604bb-f7", "ovs_interfaceid": "819604bb-f7cf-449f-8681-bf4901e756e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "92411dba-21d2-474e-9b4f-cda4bea94122", "address": "fa:16:3e:45:5c:3f", "network": {"id": "14fc2af1-19a4-4596-8235-9cc98dac9d76", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1810972316", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.240", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "0c7f48c684044564b9081d6bc04c7e29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d054505-89d3-49c5-8b38-5da917a42c49", "external-id": "nsx-vlan-transportzone-888", "segmentation_id": 888, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92411dba-21", "ovs_interfaceid": "92411dba-21d2-474e-9b4f-cda4bea94122", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8dd44cd3-3b97-484e-b3ed-ddb88f224343", "address": "fa:16:3e:16:81:8f", "network": {"id": "6af5ce9e-58e2-42ba-a351-b29430e2a962", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-712942607", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.21", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0c7f48c684044564b9081d6bc04c7e29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8dd44cd3-3b", "ovs_interfaceid": "8dd44cd3-3b97-484e-b3ed-ddb88f224343", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 824.903033] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292871, 'name': Rename_Task, 'duration_secs': 0.134321} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 824.903033] env[61852]: DEBUG oslo_vmware.api [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': task-1292872, 'name': ReconfigVM_Task, 'duration_secs': 0.287465} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 824.903033] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 824.903033] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Reconfigured VM instance instance-0000003d to attach disk [datastore1] e265a4be-7b37-40b5-a199-42a7cd945f66/e265a4be-7b37-40b5-a199-42a7cd945f66.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 824.904139] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-773acb13-5edf-4a26-879c-195a4f680bcc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 824.905999] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bc397543-bcc4-41ec-baeb-ac8ef2803af9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 824.917489] env[61852]: DEBUG oslo_vmware.api [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Waiting for the task: (returnval){
[ 824.917489] env[61852]: value = "task-1292874"
[ 824.917489] env[61852]: _type = "Task"
[ 824.917489] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 824.917999] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for the task: (returnval){
[ 824.917999] env[61852]: value = "task-1292873"
[ 824.917999] env[61852]: _type = "Task"
[ 824.917999] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 824.927266] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4a91f47e-d40e-4b6c-85de-997adc01f731 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Lock "0ec1210f-7d42-4b71-abdc-9f818ffb91ea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 102.067s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 824.932818] env[61852]: DEBUG oslo_vmware.api [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': task-1292874, 'name': Rename_Task} progress is 10%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 824.936626] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292873, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 825.015515] env[61852]: DEBUG oslo_concurrency.lockutils [req-6e65fb7d-df0d-40fd-af68-75a5d943fcf5 req-38436ced-e161-4af9-a6bc-896ce0d9dfc7 service nova] Releasing lock "refresh_cache-f8ebb1b7-39c6-486e-ab25-23080d858846" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 825.073023] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52fa1570-9187-dd3b-0c99-2b98b2e976f1, 'name': SearchDatastore_Task, 'duration_secs': 0.011305} completed successfully.
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.073023] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 825.073023] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 825.073023] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 825.073023] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.073023] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 825.073023] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-72dc206e-6854-4c63-97ee-cfc123cf8330 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.080299] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 825.080489] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 825.081237] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f43b849b-c683-48e4-a337-0aec97775832 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.086826] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for the task: (returnval){ [ 825.086826] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52a7693e-728a-3a32-b2ea-94505315d789" [ 825.086826] env[61852]: _type = "Task" [ 825.086826] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.099324] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52a7693e-728a-3a32-b2ea-94505315d789, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.190195] env[61852]: DEBUG nova.compute.manager [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 825.191276] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28a4f8ed-e702-4630-b174-4cfaa7894366 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.383143] env[61852]: DEBUG oslo_concurrency.lockutils [req-d90be1fc-8862-4e10-a9c1-ba9dcd23f5b1 req-c52fcb75-3f27-4763-9332-352e762232da service nova] Releasing lock "refresh_cache-89970cff-cb49-4803-81a5-1675b0ea4aaf" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 825.384627] env[61852]: DEBUG nova.compute.utils [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 825.386056] env[61852]: DEBUG nova.compute.manager [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 825.386239] env[61852]: DEBUG nova.network.neutron [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 825.413636] env[61852]: DEBUG oslo_concurrency.lockutils [req-a01efe60-6eb4-4da5-ba59-6fbedd381461 req-2cc81877-950f-47f1-b83d-13e9381fbabb service nova] Releasing lock "refresh_cache-e265a4be-7b37-40b5-a199-42a7cd945f66" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 825.431339] env[61852]: DEBUG oslo_vmware.api [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': task-1292874, 'name': Rename_Task, 'duration_secs': 0.139259} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.434988] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 825.435145] env[61852]: DEBUG nova.compute.manager [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 825.437826] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292873, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.438249] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-400ad64a-3d94-414d-bcfa-a78568dbcfb2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.445754] env[61852]: DEBUG oslo_vmware.api [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Waiting for the task: (returnval){ [ 825.445754] env[61852]: value = "task-1292875" [ 825.445754] env[61852]: _type = "Task" [ 825.445754] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.451655] env[61852]: DEBUG nova.policy [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eeca45e07f5b41e38b9ab8ac31bad06c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14a017ea2b084ae0ad2994dda7809c7c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 825.458846] env[61852]: DEBUG oslo_vmware.api [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': task-1292875, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.605017] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52a7693e-728a-3a32-b2ea-94505315d789, 'name': SearchDatastore_Task, 'duration_secs': 0.008369} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.605017] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a113c4b9-363e-4d9a-9997-78ba95e45a87 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.612045] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for the task: (returnval){ [ 825.612045] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52fb762f-a35a-28c3-80a9-8013544bff9e" [ 825.612045] env[61852]: _type = "Task" [ 825.612045] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.622093] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52fb762f-a35a-28c3-80a9-8013544bff9e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.674843] env[61852]: DEBUG oslo_concurrency.lockutils [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquiring lock "23ff3009-7b13-4d5e-93ed-ca1c3e9127bb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 825.675115] env[61852]: DEBUG oslo_concurrency.lockutils [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Lock "23ff3009-7b13-4d5e-93ed-ca1c3e9127bb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 825.706923] env[61852]: INFO nova.compute.manager [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] instance snapshotting [ 825.711749] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ced36a2b-617d-444c-83ee-f2f475f1a625 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.744047] env[61852]: DEBUG nova.network.neutron [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Successfully updated port: 33def83c-31aa-4bb0-9af4-8c7657457d6f {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 825.748222] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8b77aa0-5a4c-4a4c-b3e9-851e11f3736f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.892038] env[61852]: DEBUG nova.compute.manager [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 825.932532] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292873, 'name': PowerOnVM_Task, 'duration_secs': 0.932837} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.932870] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 825.933225] env[61852]: INFO nova.compute.manager [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Took 7.66 seconds to spawn the instance on the hypervisor. [ 825.933467] env[61852]: DEBUG nova.compute.manager [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 825.935568] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-994dc8aa-f7ae-4a94-9ae8-0634fe5d34ef {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.949991] env[61852]: DEBUG nova.network.neutron [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Successfully created port: d6bff739-5602-402b-8bb2-eb9bb4ab0bd7 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 825.970094] env[61852]: DEBUG oslo_vmware.api [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': task-1292875, 'name': PowerOnVM_Task, 'duration_secs': 0.460329} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.970094] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 825.970332] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 825.970599] env[61852]: INFO nova.compute.manager [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Took 12.63 seconds to spawn the instance on the hypervisor. 
[ 825.970943] env[61852]: DEBUG nova.compute.manager [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 825.972042] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1832aec6-272b-4f8d-bb3f-e5e04ed3bfae {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.030942] env[61852]: DEBUG nova.compute.manager [req-e73f21f0-4503-47d0-852c-ea72b6291e06 req-a8aefc8c-ed7f-4928-bb3d-0bff21c1c36d service nova] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Received event network-vif-plugged-33def83c-31aa-4bb0-9af4-8c7657457d6f {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 826.032013] env[61852]: DEBUG oslo_concurrency.lockutils [req-e73f21f0-4503-47d0-852c-ea72b6291e06 req-a8aefc8c-ed7f-4928-bb3d-0bff21c1c36d service nova] Acquiring lock "883a0d5a-f775-4ffc-abf0-921d0ea6cc8c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.032495] env[61852]: DEBUG oslo_concurrency.lockutils [req-e73f21f0-4503-47d0-852c-ea72b6291e06 req-a8aefc8c-ed7f-4928-bb3d-0bff21c1c36d service nova] Lock "883a0d5a-f775-4ffc-abf0-921d0ea6cc8c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.032495] env[61852]: DEBUG oslo_concurrency.lockutils [req-e73f21f0-4503-47d0-852c-ea72b6291e06 req-a8aefc8c-ed7f-4928-bb3d-0bff21c1c36d service nova] Lock "883a0d5a-f775-4ffc-abf0-921d0ea6cc8c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.032676] env[61852]: DEBUG nova.compute.manager [req-e73f21f0-4503-47d0-852c-ea72b6291e06 req-a8aefc8c-ed7f-4928-bb3d-0bff21c1c36d service nova] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] No waiting events found dispatching network-vif-plugged-33def83c-31aa-4bb0-9af4-8c7657457d6f {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 826.032843] env[61852]: WARNING nova.compute.manager [req-e73f21f0-4503-47d0-852c-ea72b6291e06 req-a8aefc8c-ed7f-4928-bb3d-0bff21c1c36d service nova] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Received unexpected event network-vif-plugged-33def83c-31aa-4bb0-9af4-8c7657457d6f for instance with vm_state building and task_state spawning. [ 826.124390] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52fb762f-a35a-28c3-80a9-8013544bff9e, 'name': SearchDatastore_Task, 'duration_secs': 0.010367} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.124689] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 826.124949] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] f8ebb1b7-39c6-486e-ab25-23080d858846/f8ebb1b7-39c6-486e-ab25-23080d858846.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 826.125411] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-756bfb4a-dae1-4cc5-815d-1f4ed19a04bb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.131359] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for the task: (returnval){ [ 826.131359] env[61852]: value = "task-1292876" [ 826.131359] env[61852]: _type = "Task" [ 826.131359] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.141569] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292876, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.174151] env[61852]: DEBUG nova.compute.manager [req-d73d2a42-a1bd-4d7f-965b-5d56f416d7ec req-29ab962a-89fa-47ff-913b-87b3606aa708 service nova] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Received event network-changed-72d13320-e518-4f1a-98b0-cb48bcb2fe11 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 826.174151] env[61852]: DEBUG nova.compute.manager [req-d73d2a42-a1bd-4d7f-965b-5d56f416d7ec req-29ab962a-89fa-47ff-913b-87b3606aa708 service nova] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Refreshing instance network info cache due to event network-changed-72d13320-e518-4f1a-98b0-cb48bcb2fe11. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 826.174151] env[61852]: DEBUG oslo_concurrency.lockutils [req-d73d2a42-a1bd-4d7f-965b-5d56f416d7ec req-29ab962a-89fa-47ff-913b-87b3606aa708 service nova] Acquiring lock "refresh_cache-0ec1210f-7d42-4b71-abdc-9f818ffb91ea" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 826.174151] env[61852]: DEBUG oslo_concurrency.lockutils [req-d73d2a42-a1bd-4d7f-965b-5d56f416d7ec req-29ab962a-89fa-47ff-913b-87b3606aa708 service nova] Acquired lock "refresh_cache-0ec1210f-7d42-4b71-abdc-9f818ffb91ea" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.174485] env[61852]: DEBUG nova.network.neutron [req-d73d2a42-a1bd-4d7f-965b-5d56f416d7ec req-29ab962a-89fa-47ff-913b-87b3606aa708 service nova] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Refreshing network info cache for port 72d13320-e518-4f1a-98b0-cb48bcb2fe11 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 826.257955] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquiring lock "refresh_cache-883a0d5a-f775-4ffc-abf0-921d0ea6cc8c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 826.260569] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquired lock "refresh_cache-883a0d5a-f775-4ffc-abf0-921d0ea6cc8c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.260807] env[61852]: DEBUG nova.network.neutron [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 826.267583] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Creating Snapshot of the VM instance {{(pid=61852) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 826.267583] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c2e5e624-f9a4-4f15-839a-f196475ae55a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.277295] env[61852]: DEBUG oslo_vmware.api [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){ [ 826.277295] env[61852]: value = "task-1292877" [ 826.277295] env[61852]: _type = "Task" [ 826.277295] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.287411] env[61852]: DEBUG oslo_vmware.api [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292877, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.400659] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad8e1f3e-7faf-4c20-a7c6-262c0b179f51 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.414751] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07541704-5325-47e6-84e5-dc051d693018 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.457034] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-860c1ae4-2d71-43ed-9ae9-4b1b1d232a4e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.470971] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09c47548-34fd-4b72-9bdd-bed2ec1f504b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.479481] env[61852]: INFO nova.compute.manager [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Took 33.88 seconds to build instance. [ 826.495274] env[61852]: DEBUG nova.compute.provider_tree [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 826.498915] env[61852]: INFO nova.compute.manager [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Took 38.05 seconds to build instance. [ 826.645538] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292876, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.795864] env[61852]: DEBUG oslo_vmware.api [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292877, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.806033] env[61852]: DEBUG nova.network.neutron [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 826.857731] env[61852]: DEBUG oslo_concurrency.lockutils [None req-30fb9177-9d8a-422e-a122-295106d5f669 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Acquiring lock "0ec1210f-7d42-4b71-abdc-9f818ffb91ea" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.858179] env[61852]: DEBUG oslo_concurrency.lockutils [None req-30fb9177-9d8a-422e-a122-295106d5f669 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Lock "0ec1210f-7d42-4b71-abdc-9f818ffb91ea" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.858387] env[61852]: INFO nova.compute.manager [None req-30fb9177-9d8a-422e-a122-295106d5f669 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Rebooting instance [ 826.874713] env[61852]: DEBUG oslo_concurrency.lockutils [None req-dfefb50e-ba67-4191-9b7a-463e4943ac2c tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "interface-d3922357-383f-4f7e-9c76-4eb688a092b9-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.874979] env[61852]: DEBUG oslo_concurrency.lockutils [None req-dfefb50e-ba67-4191-9b7a-463e4943ac2c tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "interface-d3922357-383f-4f7e-9c76-4eb688a092b9-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.875403] env[61852]: DEBUG nova.objects.instance [None req-dfefb50e-ba67-4191-9b7a-463e4943ac2c tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lazy-loading 'flavor' on Instance uuid d3922357-383f-4f7e-9c76-4eb688a092b9 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 826.907676] env[61852]: DEBUG nova.compute.manager [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 826.941037] env[61852]: DEBUG nova.virt.hardware [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 826.941037] env[61852]: DEBUG nova.virt.hardware [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 826.941202] env[61852]: DEBUG nova.virt.hardware [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 826.941417] env[61852]: DEBUG nova.virt.hardware [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 826.941585] env[61852]: DEBUG nova.virt.hardware [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 826.941761] env[61852]: DEBUG nova.virt.hardware [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 826.941984] env[61852]: DEBUG nova.virt.hardware [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 826.942285] env[61852]: DEBUG nova.virt.hardware [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 826.942536] 
env[61852]: DEBUG nova.virt.hardware [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 826.942796] env[61852]: DEBUG nova.virt.hardware [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 826.943086] env[61852]: DEBUG nova.virt.hardware [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 826.944268] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec2cfc64-65cf-4fa1-9f6a-00d1e4f964bf {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.953050] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b32c61f-31ba-4f62-8319-14552efa5695 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.982077] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lock "89970cff-cb49-4803-81a5-1675b0ea4aaf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 99.739s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.001730] env[61852]: DEBUG nova.scheduler.client.report [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 827.007228] env[61852]: DEBUG oslo_concurrency.lockutils [None req-25630b0e-44be-4cf5-82f6-2004e677b96e tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Lock "e265a4be-7b37-40b5-a199-42a7cd945f66" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 107.624s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.007553] env[61852]: DEBUG nova.network.neutron [req-d73d2a42-a1bd-4d7f-965b-5d56f416d7ec req-29ab962a-89fa-47ff-913b-87b3606aa708 service nova] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Updated VIF entry in instance network info cache for port 
72d13320-e518-4f1a-98b0-cb48bcb2fe11. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 827.007842] env[61852]: DEBUG nova.network.neutron [req-d73d2a42-a1bd-4d7f-965b-5d56f416d7ec req-29ab962a-89fa-47ff-913b-87b3606aa708 service nova] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Updating instance_info_cache with network_info: [{"id": "72d13320-e518-4f1a-98b0-cb48bcb2fe11", "address": "fa:16:3e:74:13:9d", "network": {"id": "dc45adde-e4fb-4495-a4c3-3373c99a2eb7", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1925353850-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d65efc960c14799bcf1b26ecdf9c912", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72d13320-e5", "ovs_interfaceid": "72d13320-e518-4f1a-98b0-cb48bcb2fe11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.049220] env[61852]: DEBUG nova.network.neutron [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Updating instance_info_cache with network_info: [{"id": "33def83c-31aa-4bb0-9af4-8c7657457d6f", "address": "fa:16:3e:85:a5:c8", "network": {"id": "50fe42e7-c56b-4943-ae54-bab5c794e094", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-195787110-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6a9b0e24a2545cf877ccef6701fcac6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d62c1cf-f39a-4626-9552-f1e13c692636", "external-id": "nsx-vlan-transportzone-748", "segmentation_id": 748, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33def83c-31", "ovs_interfaceid": "33def83c-31aa-4bb0-9af4-8c7657457d6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.144185] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292876, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.590167} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.144518] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] f8ebb1b7-39c6-486e-ab25-23080d858846/f8ebb1b7-39c6-486e-ab25-23080d858846.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 827.144727] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 827.144977] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5c469aca-6233-4fe9-8cdb-03efcdebcc06 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.151650] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for the task: (returnval){ [ 827.151650] env[61852]: value = "task-1292878" [ 827.151650] env[61852]: _type = "Task" [ 827.151650] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.162486] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292878, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.293592] env[61852]: DEBUG oslo_vmware.api [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292877, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.379084] env[61852]: DEBUG nova.objects.instance [None req-dfefb50e-ba67-4191-9b7a-463e4943ac2c tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lazy-loading 'pci_requests' on Instance uuid d3922357-383f-4f7e-9c76-4eb688a092b9 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 827.385853] env[61852]: DEBUG oslo_concurrency.lockutils [None req-30fb9177-9d8a-422e-a122-295106d5f669 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Acquiring lock "refresh_cache-0ec1210f-7d42-4b71-abdc-9f818ffb91ea" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.485818] env[61852]: DEBUG nova.compute.manager [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 827.509625] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.637s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.509968] env[61852]: DEBUG nova.compute.manager [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 827.513795] env[61852]: DEBUG oslo_concurrency.lockutils [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.601s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.515527] env[61852]: INFO nova.compute.claims [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 827.518708] env[61852]: DEBUG nova.compute.manager [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 827.521526] env[61852]: DEBUG oslo_concurrency.lockutils [req-d73d2a42-a1bd-4d7f-965b-5d56f416d7ec req-29ab962a-89fa-47ff-913b-87b3606aa708 service nova] Releasing lock "refresh_cache-0ec1210f-7d42-4b71-abdc-9f818ffb91ea" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 827.522450] env[61852]: DEBUG oslo_concurrency.lockutils [None req-30fb9177-9d8a-422e-a122-295106d5f669 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Acquired lock "refresh_cache-0ec1210f-7d42-4b71-abdc-9f818ffb91ea" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.522941] env[61852]: DEBUG nova.network.neutron [None req-30fb9177-9d8a-422e-a122-295106d5f669 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 827.552136] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Releasing lock "refresh_cache-883a0d5a-f775-4ffc-abf0-921d0ea6cc8c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 827.552462] env[61852]: DEBUG nova.compute.manager [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Instance network_info: |[{"id": "33def83c-31aa-4bb0-9af4-8c7657457d6f", "address": "fa:16:3e:85:a5:c8", "network": {"id": "50fe42e7-c56b-4943-ae54-bab5c794e094", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-195787110-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6a9b0e24a2545cf877ccef6701fcac6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d62c1cf-f39a-4626-9552-f1e13c692636", "external-id": "nsx-vlan-transportzone-748", "segmentation_id": 748, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33def83c-31", "ovs_interfaceid": "33def83c-31aa-4bb0-9af4-8c7657457d6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 827.553161] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:a5:c8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d62c1cf-f39a-4626-9552-f1e13c692636', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '33def83c-31aa-4bb0-9af4-8c7657457d6f', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 827.560656] env[61852]: DEBUG oslo.service.loopingcall [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 827.561755] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 827.561940] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-75229a24-3094-4f6b-886e-8b91c5170996 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.586647] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 827.586647] env[61852]: value = "task-1292879" [ 827.586647] env[61852]: _type = "Task" [ 827.586647] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.595071] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292879, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.661122] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292878, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.281186} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.661560] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 827.662340] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aa35a7e-af9c-4a25-9069-91cb5f87fe82 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.684985] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] f8ebb1b7-39c6-486e-ab25-23080d858846/f8ebb1b7-39c6-486e-ab25-23080d858846.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 827.686534] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9ceba01-c38f-4399-917c-8201a0529a43 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.708644] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for the task: (returnval){ [ 827.708644] env[61852]: value = "task-1292880" [ 827.708644] env[61852]: _type = "Task" [ 827.708644] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.717678] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292880, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.794050] env[61852]: DEBUG oslo_vmware.api [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292877, 'name': CreateSnapshot_Task, 'duration_secs': 1.249287} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.794354] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Created Snapshot of the VM instance {{(pid=61852) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 827.795150] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2bb1cc8-f34f-412d-a388-551271702d75 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.882842] env[61852]: DEBUG nova.objects.base [None req-dfefb50e-ba67-4191-9b7a-463e4943ac2c tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=61852) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 827.882842] env[61852]: DEBUG nova.network.neutron [None req-dfefb50e-ba67-4191-9b7a-463e4943ac2c tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 828.010604] env[61852]: DEBUG oslo_concurrency.lockutils [None req-dfefb50e-ba67-4191-9b7a-463e4943ac2c tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "interface-d3922357-383f-4f7e-9c76-4eb688a092b9-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.135s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.015384] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.023953] env[61852]: DEBUG nova.compute.utils [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 828.028464] env[61852]: DEBUG nova.compute.manager [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 828.028464] env[61852]: DEBUG nova.network.neutron [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 828.051118] env[61852]: DEBUG oslo_concurrency.lockutils [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.053578] env[61852]: DEBUG nova.network.neutron [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Successfully updated port: d6bff739-5602-402b-8bb2-eb9bb4ab0bd7 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 828.086349] env[61852]: DEBUG nova.policy [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'be922c40dddf48c8ae436d0a244e7b6b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bdac3605118e44a69d44ab56cafe2e21', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 828.098513] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292879, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.177083] env[61852]: DEBUG nova.compute.manager [req-f3f4d642-d0b5-4fb8-b446-c69e82fed5b2 req-27df866a-6105-4813-a0a5-e057aa4e7f84 service nova] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Received event network-vif-plugged-d6bff739-5602-402b-8bb2-eb9bb4ab0bd7 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 828.177160] env[61852]: DEBUG oslo_concurrency.lockutils [req-f3f4d642-d0b5-4fb8-b446-c69e82fed5b2 req-27df866a-6105-4813-a0a5-e057aa4e7f84 service nova] Acquiring lock "b0f8f7dd-e559-43be-b541-c3da48a07d68-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.177586] env[61852]: DEBUG oslo_concurrency.lockutils [req-f3f4d642-d0b5-4fb8-b446-c69e82fed5b2 req-27df866a-6105-4813-a0a5-e057aa4e7f84 service nova] Lock "b0f8f7dd-e559-43be-b541-c3da48a07d68-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.177586] env[61852]: DEBUG oslo_concurrency.lockutils [req-f3f4d642-d0b5-4fb8-b446-c69e82fed5b2 req-27df866a-6105-4813-a0a5-e057aa4e7f84 service nova] Lock "b0f8f7dd-e559-43be-b541-c3da48a07d68-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.177694] env[61852]: DEBUG nova.compute.manager [req-f3f4d642-d0b5-4fb8-b446-c69e82fed5b2 req-27df866a-6105-4813-a0a5-e057aa4e7f84 service nova] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] No waiting events found dispatching network-vif-plugged-d6bff739-5602-402b-8bb2-eb9bb4ab0bd7 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 828.177830] env[61852]: WARNING nova.compute.manager [req-f3f4d642-d0b5-4fb8-b446-c69e82fed5b2 req-27df866a-6105-4813-a0a5-e057aa4e7f84 service nova] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Received unexpected event network-vif-plugged-d6bff739-5602-402b-8bb2-eb9bb4ab0bd7 for instance with vm_state building and task_state spawning. 
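(Aside, not part of the log: the `pop_instance_event` records just above show the per-instance "-events" lock being taken and dropped around the event lookup. A minimal sketch of that locking pattern, assuming only the public oslo.concurrency API — this is illustrative, not Nova's actual code:

    from oslo_concurrency import lockutils

    # Calls to a @synchronized-decorated function produce the
    # lockutils.py:402/407/421 DEBUG lines seen above:
    # 'Acquiring lock "<name>" by "<function>"',
    # 'Lock "<name>" acquired ... :: waited Ns', and
    # 'Lock "<name>" "released" ... :: held Ns'.
    @lockutils.synchronized('b0f8f7dd-e559-43be-b541-c3da48a07d68-events')
    def _pop_event():
        # Critical section: pop any waiter registered for the
        # network-vif-plugged event; when none exists, Nova logs the
        # 'No waiting events found dispatching ...' line instead.
        pass

    # The 'Acquired lock "refresh_cache-<uuid>"' / 'Releasing lock ...'
    # records (lockutils.py:313/331) come from the context-manager form:
    with lockutils.lock('refresh_cache-b0f8f7dd-e559-43be-b541-c3da48a07d68'):
        pass

    _pop_event()

The WARNING that follows is the expected outcome of that lookup racing ahead of the waiter: the vif-plugged event arrived while the instance was still building, so no one was registered for it yet.)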
[ 828.204354] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Acquiring lock "e265a4be-7b37-40b5-a199-42a7cd945f66" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.204707] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Lock "e265a4be-7b37-40b5-a199-42a7cd945f66" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.205023] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Acquiring lock "e265a4be-7b37-40b5-a199-42a7cd945f66-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.205267] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Lock "e265a4be-7b37-40b5-a199-42a7cd945f66-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.205512] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Lock "e265a4be-7b37-40b5-a199-42a7cd945f66-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.207870] env[61852]: INFO nova.compute.manager [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Terminating instance [ 828.209865] env[61852]: DEBUG nova.compute.manager [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 828.210142] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 828.216744] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ad5624c-ca29-46c7-8769-545d07f18b73 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.226759] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292880, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.232358] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 828.232358] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4cf97dec-b647-48d3-83c7-286ef6f1a6fc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.236926] env[61852]: DEBUG oslo_vmware.api [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Waiting for the task: (returnval){ [ 828.236926] env[61852]: value = "task-1292881" [ 828.236926] env[61852]: _type = "Task" [ 828.236926] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.248262] env[61852]: DEBUG oslo_vmware.api [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': task-1292881, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.290035] env[61852]: DEBUG nova.network.neutron [None req-30fb9177-9d8a-422e-a122-295106d5f669 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Updating instance_info_cache with network_info: [{"id": "72d13320-e518-4f1a-98b0-cb48bcb2fe11", "address": "fa:16:3e:74:13:9d", "network": {"id": "dc45adde-e4fb-4495-a4c3-3373c99a2eb7", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1925353850-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d65efc960c14799bcf1b26ecdf9c912", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72d13320-e5", "ovs_interfaceid": "72d13320-e518-4f1a-98b0-cb48bcb2fe11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.315360] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Creating linked-clone VM from snapshot {{(pid=61852) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 828.316172] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-2ead7f17-2ebb-46b1-8135-d9b942d560d0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.326426] env[61852]: DEBUG oslo_vmware.api [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){ [ 828.326426] env[61852]: value = "task-1292882" [ 828.326426] env[61852]: _type = "Task" [ 828.326426] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.338171] env[61852]: DEBUG oslo_vmware.api [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292882, 'name': CloneVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.362318] env[61852]: DEBUG nova.compute.manager [req-9eb491ae-16de-4873-a64f-7bcd6f5c68f0 req-077f82c6-fa16-4397-b801-3361d79c2e34 service nova] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Received event network-changed-33def83c-31aa-4bb0-9af4-8c7657457d6f {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 828.362730] env[61852]: DEBUG nova.compute.manager [req-9eb491ae-16de-4873-a64f-7bcd6f5c68f0 req-077f82c6-fa16-4397-b801-3361d79c2e34 service nova] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Refreshing instance network info cache due to event network-changed-33def83c-31aa-4bb0-9af4-8c7657457d6f. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 828.363210] env[61852]: DEBUG oslo_concurrency.lockutils [req-9eb491ae-16de-4873-a64f-7bcd6f5c68f0 req-077f82c6-fa16-4397-b801-3361d79c2e34 service nova] Acquiring lock "refresh_cache-883a0d5a-f775-4ffc-abf0-921d0ea6cc8c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 828.363587] env[61852]: DEBUG oslo_concurrency.lockutils [req-9eb491ae-16de-4873-a64f-7bcd6f5c68f0 req-077f82c6-fa16-4397-b801-3361d79c2e34 service nova] Acquired lock "refresh_cache-883a0d5a-f775-4ffc-abf0-921d0ea6cc8c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.363856] env[61852]: DEBUG nova.network.neutron [req-9eb491ae-16de-4873-a64f-7bcd6f5c68f0 req-077f82c6-fa16-4397-b801-3361d79c2e34 service nova] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Refreshing network info cache for port 33def83c-31aa-4bb0-9af4-8c7657457d6f {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 828.385895] env[61852]: DEBUG nova.network.neutron [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Successfully created port: ea60304c-08b8-4035-8ece-fc40b1b508b1 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 828.529185] env[61852]: DEBUG nova.compute.manager [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 828.556306] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "refresh_cache-b0f8f7dd-e559-43be-b541-c3da48a07d68" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 828.556496] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired lock "refresh_cache-b0f8f7dd-e559-43be-b541-c3da48a07d68" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.556634] env[61852]: DEBUG nova.network.neutron [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 828.600683] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292879, 'name': CreateVM_Task, 'duration_secs': 0.656404} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.603681] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 828.604601] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 828.604777] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.605113] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 828.605378] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec310c6f-cf4d-4f95-80e5-148793715ba7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.610377] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for the task: (returnval){ [ 828.610377] env[61852]: value = 
"session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52c04393-320c-cfda-5dd6-1363052ef6a9" [ 828.610377] env[61852]: _type = "Task" [ 828.610377] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.618894] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52c04393-320c-cfda-5dd6-1363052ef6a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.720111] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292880, 'name': ReconfigVM_Task, 'duration_secs': 0.569703} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.720111] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Reconfigured VM instance instance-00000040 to attach disk [datastore2] f8ebb1b7-39c6-486e-ab25-23080d858846/f8ebb1b7-39c6-486e-ab25-23080d858846.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 828.720718] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7eaf0bea-543f-42f5-a349-1ff22694f09f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.727550] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for the task: (returnval){ [ 828.727550] env[61852]: value = "task-1292883" [ 828.727550] env[61852]: _type = "Task" [ 828.727550] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.738808] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292883, 'name': Rename_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.749625] env[61852]: DEBUG oslo_vmware.api [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': task-1292881, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.793131] env[61852]: DEBUG oslo_concurrency.lockutils [None req-30fb9177-9d8a-422e-a122-295106d5f669 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Releasing lock "refresh_cache-0ec1210f-7d42-4b71-abdc-9f818ffb91ea" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 828.796507] env[61852]: DEBUG nova.compute.manager [None req-30fb9177-9d8a-422e-a122-295106d5f669 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 828.797837] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaaec8b5-2179-4946-9718-fdb29f641ba1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.841545] env[61852]: DEBUG oslo_vmware.api [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292882, 'name': CloneVM_Task} progress is 94%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.861689] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-708fc5fb-3a46-447b-9f7f-e6b02bbe8507 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.871764] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e7e9d4-566f-43ff-82bd-54e50837b03b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.905511] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e6bcb39-8c02-4eeb-8b73-57c6c8412853 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.911318] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef64a5b-3f82-4ced-b547-45a01cddcdab {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.929979] env[61852]: DEBUG nova.compute.provider_tree [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 829.096932] env[61852]: DEBUG nova.network.neutron [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Instance cache missing network 
info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 829.127700] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52c04393-320c-cfda-5dd6-1363052ef6a9, 'name': SearchDatastore_Task, 'duration_secs': 0.019766} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.128725] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.129133] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 829.129468] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.129681] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.129930] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 829.130302] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1b272959-c3b2-49b1-a6e3-7ab5183b4f3f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.146194] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 829.146194] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 829.146588] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0659c9e6-e9a9-4451-ada4-a6de328a4f35 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.154377] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for the task: (returnval){ [ 829.154377] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52b3971b-4ba7-7567-27f7-93f40a1a7cfb" [ 829.154377] env[61852]: _type = "Task" [ 829.154377] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.162927] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52b3971b-4ba7-7567-27f7-93f40a1a7cfb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.203206] env[61852]: DEBUG nova.network.neutron [req-9eb491ae-16de-4873-a64f-7bcd6f5c68f0 req-077f82c6-fa16-4397-b801-3361d79c2e34 service nova] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Updated VIF entry in instance network info cache for port 33def83c-31aa-4bb0-9af4-8c7657457d6f. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 829.203682] env[61852]: DEBUG nova.network.neutron [req-9eb491ae-16de-4873-a64f-7bcd6f5c68f0 req-077f82c6-fa16-4397-b801-3361d79c2e34 service nova] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Updating instance_info_cache with network_info: [{"id": "33def83c-31aa-4bb0-9af4-8c7657457d6f", "address": "fa:16:3e:85:a5:c8", "network": {"id": "50fe42e7-c56b-4943-ae54-bab5c794e094", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-195787110-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6a9b0e24a2545cf877ccef6701fcac6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d62c1cf-f39a-4626-9552-f1e13c692636", "external-id": "nsx-vlan-transportzone-748", "segmentation_id": 748, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33def83c-31", "ovs_interfaceid": "33def83c-31aa-4bb0-9af4-8c7657457d6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.237862] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292883, 'name': Rename_Task} progress is 99%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.251880] env[61852]: DEBUG oslo_vmware.api [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': task-1292881, 'name': PowerOffVM_Task, 'duration_secs': 0.615483} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.251880] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 829.251880] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 829.251880] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-815e938c-3433-49e6-8695-3be50a12e286 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.257333] env[61852]: DEBUG nova.network.neutron [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Updating instance_info_cache with network_info: [{"id": "d6bff739-5602-402b-8bb2-eb9bb4ab0bd7", "address": "fa:16:3e:35:51:53", "network": {"id": "37c975fc-d484-4e07-82b4-dc10db3dab61", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2132613748-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14a017ea2b084ae0ad2994dda7809c7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6bff739-56", "ovs_interfaceid": "d6bff739-5602-402b-8bb2-eb9bb4ab0bd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.338807] env[61852]: DEBUG oslo_vmware.api [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292882, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.427552] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 829.427952] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 829.428227] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Deleting the datastore file [datastore1] e265a4be-7b37-40b5-a199-42a7cd945f66 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 829.428620] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f1d86ade-7a11-40bf-8d9a-12ab87e085fc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.437949] env[61852]: DEBUG oslo_vmware.api [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Waiting for the task: (returnval){ [ 829.437949] env[61852]: value = "task-1292885" [ 829.437949] env[61852]: _type = "Task" [ 829.437949] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.448364] env[61852]: DEBUG oslo_vmware.api [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': task-1292885, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.455569] env[61852]: ERROR nova.scheduler.client.report [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [req-aa2a58ab-82b3-4e17-b38c-6fae29e56766] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f818062c-7b17-4bd0-94af-192a674543c3. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-aa2a58ab-82b3-4e17-b38c-6fae29e56766"}]} [ 829.475589] env[61852]: DEBUG nova.scheduler.client.report [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Refreshing inventories for resource provider f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 829.492413] env[61852]: DEBUG nova.scheduler.client.report [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Updating ProviderTree inventory for provider f818062c-7b17-4bd0-94af-192a674543c3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 829.492842] env[61852]: DEBUG nova.compute.provider_tree [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 829.511071] env[61852]: DEBUG nova.scheduler.client.report [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Refreshing aggregate associations for resource provider f818062c-7b17-4bd0-94af-192a674543c3, aggregates: None {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 829.532031] env[61852]: DEBUG nova.scheduler.client.report [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Refreshing trait associations for resource provider f818062c-7b17-4bd0-94af-192a674543c3, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 829.541164] env[61852]: DEBUG nova.compute.manager [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 829.667940] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52b3971b-4ba7-7567-27f7-93f40a1a7cfb, 'name': SearchDatastore_Task, 'duration_secs': 0.01671} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.671559] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3093720d-824a-485f-b350-c52a546d7f26 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.677484] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for the task: (returnval){ [ 829.677484] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52bcdfa9-0794-da96-9dd1-da84f5b91d1d" [ 829.677484] env[61852]: _type = "Task" [ 829.677484] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.687554] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52bcdfa9-0794-da96-9dd1-da84f5b91d1d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.707503] env[61852]: DEBUG oslo_concurrency.lockutils [req-9eb491ae-16de-4873-a64f-7bcd6f5c68f0 req-077f82c6-fa16-4397-b801-3361d79c2e34 service nova] Releasing lock "refresh_cache-883a0d5a-f775-4ffc-abf0-921d0ea6cc8c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.736808] env[61852]: DEBUG nova.virt.hardware [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 829.736808] env[61852]: DEBUG nova.virt.hardware [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 
829.737597] env[61852]: DEBUG nova.virt.hardware [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 829.737981] env[61852]: DEBUG nova.virt.hardware [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 829.738355] env[61852]: DEBUG nova.virt.hardware [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 829.738712] env[61852]: DEBUG nova.virt.hardware [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 829.739385] env[61852]: DEBUG nova.virt.hardware [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 829.739608] env[61852]: DEBUG nova.virt.hardware [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 829.739833] env[61852]: DEBUG nova.virt.hardware [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 829.740090] env[61852]: DEBUG nova.virt.hardware [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 829.740290] env[61852]: DEBUG nova.virt.hardware [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 829.741355] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5e39c19-619b-4f24-915f-a62c681534dc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.760708] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292883, 'name': Rename_Task} progress is 99%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.761024] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Releasing lock "refresh_cache-b0f8f7dd-e559-43be-b541-c3da48a07d68" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.761331] env[61852]: DEBUG nova.compute.manager [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Instance network_info: |[{"id": "d6bff739-5602-402b-8bb2-eb9bb4ab0bd7", "address": "fa:16:3e:35:51:53", "network": {"id": "37c975fc-d484-4e07-82b4-dc10db3dab61", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2132613748-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14a017ea2b084ae0ad2994dda7809c7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6bff739-56", "ovs_interfaceid": "d6bff739-5602-402b-8bb2-eb9bb4ab0bd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 829.768825] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:51:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51ae336c-12cf-406a-b1ca-54e9ce553b3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd6bff739-5602-402b-8bb2-eb9bb4ab0bd7', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 829.776590] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Creating folder: Project (14a017ea2b084ae0ad2994dda7809c7c). Parent ref: group-v277280. 
{{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 829.780579] env[61852]: DEBUG oslo_concurrency.lockutils [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "interface-d3922357-383f-4f7e-9c76-4eb688a092b9-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.781172] env[61852]: DEBUG oslo_concurrency.lockutils [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "interface-d3922357-383f-4f7e-9c76-4eb688a092b9-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.781456] env[61852]: DEBUG nova.objects.instance [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lazy-loading 'flavor' on Instance uuid d3922357-383f-4f7e-9c76-4eb688a092b9 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 829.785040] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-08ea8415-4d4e-43a0-a1ca-0bb873cbe041 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.787360] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e9c69e6-18b3-42be-9f4d-c102401cb430 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.810302] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Created folder: Project (14a017ea2b084ae0ad2994dda7809c7c) in parent group-v277280. [ 829.810587] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Creating folder: Instances. Parent ref: group-v277343. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 829.812292] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c2f6eb0b-ef4b-4e1f-b82c-8c50078b3ed7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.823713] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Created folder: Instances in parent group-v277343. [ 829.824208] env[61852]: DEBUG oslo.service.loopingcall [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 829.828191] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8532e9bf-f8fd-4494-ad6c-76fe07bb78ec {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.832706] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 829.833592] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5a47cd6c-311a-4713-ba72-75d52b40ca51 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.859372] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-30fb9177-9d8a-422e-a122-295106d5f669 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Doing hard reboot of VM {{(pid=61852) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 829.864060] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-bf154b17-df21-4859-8896-7d773e12b1b2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.865770] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 829.865770] env[61852]: value = "task-1292888" [ 829.865770] env[61852]: _type = "Task" [ 829.865770] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.866532] env[61852]: DEBUG oslo_vmware.api [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292882, 'name': CloneVM_Task} progress is 95%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.873714] env[61852]: DEBUG oslo_vmware.api [None req-30fb9177-9d8a-422e-a122-295106d5f669 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Waiting for the task: (returnval){ [ 829.873714] env[61852]: value = "task-1292889" [ 829.873714] env[61852]: _type = "Task" [ 829.873714] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.881535] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292888, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.890658] env[61852]: DEBUG oslo_vmware.api [None req-30fb9177-9d8a-422e-a122-295106d5f669 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Task: {'id': task-1292889, 'name': ResetVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.952144] env[61852]: DEBUG oslo_vmware.api [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': task-1292885, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.320467} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.952144] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 829.952144] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 829.952144] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 829.952144] env[61852]: INFO nova.compute.manager [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Took 1.74 seconds to destroy the instance on the hypervisor. [ 829.952144] env[61852]: DEBUG oslo.service.loopingcall [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 829.952497] env[61852]: DEBUG nova.compute.manager [-] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 829.952497] env[61852]: DEBUG nova.network.neutron [-] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 829.973225] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8ca972d-a396-4fd5-8d97-6fe4540ebc9c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.980845] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b544f320-ebf3-4017-a346-f5c65528b52f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.018864] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7b19aeb-74f8-4c54-9319-fb3a9cc3d266 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.026763] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d1b04eb-b5fc-490d-a1c5-53ae4c1dd070 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.041436] env[61852]: DEBUG nova.compute.provider_tree [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 
tempest-ServersTestJSON-848945246-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 830.069829] env[61852]: DEBUG nova.network.neutron [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Successfully updated port: ea60304c-08b8-4035-8ece-fc40b1b508b1 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 830.190349] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52bcdfa9-0794-da96-9dd1-da84f5b91d1d, 'name': SearchDatastore_Task, 'duration_secs': 0.013396} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.190687] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.191254] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c/883a0d5a-f775-4ffc-abf0-921d0ea6cc8c.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 830.191647] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-214620ae-5748-4101-b356-068c93e09bdd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.199889] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for the task: (returnval){ [ 830.199889] env[61852]: value = "task-1292890" [ 830.199889] env[61852]: _type = "Task" [ 830.199889] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.210795] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292890, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.243165] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292883, 'name': Rename_Task, 'duration_secs': 1.178897} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.243733] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 830.245161] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cc723ef5-3369-4cca-8c34-0d4264273787 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.252505] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for the task: (returnval){ [ 830.252505] env[61852]: value = "task-1292891" [ 830.252505] env[61852]: _type = "Task" [ 830.252505] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.266546] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292891, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.345022] env[61852]: DEBUG oslo_vmware.api [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292882, 'name': CloneVM_Task, 'duration_secs': 1.638876} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.345744] env[61852]: INFO nova.virt.vmwareapi.vmops [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Created linked-clone VM from snapshot [ 830.346598] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ba8a68a-4ce4-45f3-836b-07bb00de23d9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.354520] env[61852]: DEBUG nova.virt.vmwareapi.images [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Uploading image bec16c2f-506b-41d5-a8e9-28662d5a12ee {{(pid=61852) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 830.367696] env[61852]: DEBUG nova.compute.manager [req-9de5911f-1312-400d-81a5-afc96d88f3ed req-260681aa-ad80-41cc-90c6-2930c5352d79 service nova] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Received event network-changed-d6bff739-5602-402b-8bb2-eb9bb4ab0bd7 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 830.368290] env[61852]: DEBUG nova.compute.manager [req-9de5911f-1312-400d-81a5-afc96d88f3ed req-260681aa-ad80-41cc-90c6-2930c5352d79 service nova] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Refreshing instance network info cache due to event network-changed-d6bff739-5602-402b-8bb2-eb9bb4ab0bd7. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 830.368577] env[61852]: DEBUG oslo_concurrency.lockutils [req-9de5911f-1312-400d-81a5-afc96d88f3ed req-260681aa-ad80-41cc-90c6-2930c5352d79 service nova] Acquiring lock "refresh_cache-b0f8f7dd-e559-43be-b541-c3da48a07d68" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.368770] env[61852]: DEBUG oslo_concurrency.lockutils [req-9de5911f-1312-400d-81a5-afc96d88f3ed req-260681aa-ad80-41cc-90c6-2930c5352d79 service nova] Acquired lock "refresh_cache-b0f8f7dd-e559-43be-b541-c3da48a07d68" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.368853] env[61852]: DEBUG nova.network.neutron [req-9de5911f-1312-400d-81a5-afc96d88f3ed req-260681aa-ad80-41cc-90c6-2930c5352d79 service nova] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Refreshing network info cache for port d6bff739-5602-402b-8bb2-eb9bb4ab0bd7 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 830.383143] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292888, 'name': CreateVM_Task} progress is 99%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.386420] env[61852]: DEBUG oslo_vmware.rw_handles [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 830.386420] env[61852]: value = "vm-277342" [ 830.386420] env[61852]: _type = "VirtualMachine" [ 830.386420] env[61852]: }. 
{{(pid=61852) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 830.386908] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-901b4b3f-d390-48f0-a319-17610ad78f71 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.392912] env[61852]: DEBUG oslo_vmware.api [None req-30fb9177-9d8a-422e-a122-295106d5f669 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Task: {'id': task-1292889, 'name': ResetVM_Task, 'duration_secs': 0.1487} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.393991] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-30fb9177-9d8a-422e-a122-295106d5f669 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Did hard reboot of VM {{(pid=61852) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 830.393991] env[61852]: DEBUG nova.compute.manager [None req-30fb9177-9d8a-422e-a122-295106d5f669 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 830.394796] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c6ac389-7337-410a-86bb-52e27799d6d2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.399927] env[61852]: DEBUG oslo_vmware.rw_handles [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Lease: (returnval){ [ 830.399927] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5233eca3-305a-3737-52ef-3886de6fa9d7" [ 830.399927] env[61852]: _type = "HttpNfcLease" [ 830.399927] env[61852]: } obtained for exporting VM: (result){ [ 830.399927] env[61852]: value = "vm-277342" [ 830.399927] env[61852]: _type = "VirtualMachine" [ 830.399927] env[61852]: }. {{(pid=61852) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 830.400295] env[61852]: DEBUG oslo_vmware.api [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the lease: (returnval){ [ 830.400295] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5233eca3-305a-3737-52ef-3886de6fa9d7" [ 830.400295] env[61852]: _type = "HttpNfcLease" [ 830.400295] env[61852]: } to be ready. {{(pid=61852) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 830.413126] env[61852]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 830.413126] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5233eca3-305a-3737-52ef-3886de6fa9d7" [ 830.413126] env[61852]: _type = "HttpNfcLease" [ 830.413126] env[61852]: } is ready. 
{{(pid=61852) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 830.413560] env[61852]: DEBUG oslo_vmware.rw_handles [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 830.413560] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5233eca3-305a-3737-52ef-3886de6fa9d7" [ 830.413560] env[61852]: _type = "HttpNfcLease" [ 830.413560] env[61852]: }. {{(pid=61852) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 830.414435] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d810b18-a1f7-4298-a4d2-3e460d882b7f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.421934] env[61852]: DEBUG oslo_vmware.rw_handles [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c085e5-e330-17cd-640c-b86970392ca9/disk-0.vmdk from lease info. {{(pid=61852) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 830.422163] env[61852]: DEBUG oslo_vmware.rw_handles [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c085e5-e330-17cd-640c-b86970392ca9/disk-0.vmdk for reading. {{(pid=61852) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 830.495881] env[61852]: DEBUG nova.objects.instance [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lazy-loading 'pci_requests' on Instance uuid d3922357-383f-4f7e-9c76-4eb688a092b9 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 830.573197] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "refresh_cache-00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.573612] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquired lock "refresh_cache-00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.573612] env[61852]: DEBUG nova.network.neutron [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 830.592346] env[61852]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-82d4bdaf-0871-42c1-b819-0da58d6df13b {{(pid=61852) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.598824] env[61852]: DEBUG nova.scheduler.client.report [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Updated inventory for provider f818062c-7b17-4bd0-94af-192a674543c3 with generation 87 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 830.598824] env[61852]: DEBUG nova.compute.provider_tree [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Updating resource provider f818062c-7b17-4bd0-94af-192a674543c3 generation from 87 to 88 during operation: update_inventory {{(pid=61852) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 830.599594] env[61852]: DEBUG nova.compute.provider_tree [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 830.715483] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292890, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.765646] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292891, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.888172] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292888, 'name': CreateVM_Task, 'duration_secs': 0.530204} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.888424] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 830.889360] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.889660] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.890053] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 830.890374] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70fe1d02-54b1-4324-adfd-7892e6044906 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.897149] env[61852]: DEBUG oslo_vmware.api [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 830.897149] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52f82292-7a56-9b40-79e2-a49d5a3065d7" [ 830.897149] env[61852]: _type = "Task" [ 830.897149] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.908680] env[61852]: DEBUG oslo_vmware.api [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52f82292-7a56-9b40-79e2-a49d5a3065d7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.912925] env[61852]: DEBUG oslo_concurrency.lockutils [None req-30fb9177-9d8a-422e-a122-295106d5f669 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Lock "0ec1210f-7d42-4b71-abdc-9f818ffb91ea" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.055s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.998609] env[61852]: DEBUG nova.objects.base [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=61852) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 830.999279] env[61852]: DEBUG nova.network.neutron [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 831.079804] env[61852]: DEBUG nova.policy [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0f04d129452d4eb79514c52a6972af0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e93a6965a6884292bc56b01f7d54a622', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 831.105424] env[61852]: DEBUG oslo_concurrency.lockutils [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.592s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.109193] env[61852]: DEBUG nova.compute.manager [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 831.110427] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.809s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.112301] env[61852]: INFO nova.compute.claims [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 831.116061] env[61852]: DEBUG nova.network.neutron [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 831.179993] env[61852]: DEBUG nova.network.neutron [req-9de5911f-1312-400d-81a5-afc96d88f3ed req-260681aa-ad80-41cc-90c6-2930c5352d79 service nova] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Updated VIF entry in instance network info cache for port d6bff739-5602-402b-8bb2-eb9bb4ab0bd7. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 831.180396] env[61852]: DEBUG nova.network.neutron [req-9de5911f-1312-400d-81a5-afc96d88f3ed req-260681aa-ad80-41cc-90c6-2930c5352d79 service nova] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Updating instance_info_cache with network_info: [{"id": "d6bff739-5602-402b-8bb2-eb9bb4ab0bd7", "address": "fa:16:3e:35:51:53", "network": {"id": "37c975fc-d484-4e07-82b4-dc10db3dab61", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2132613748-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14a017ea2b084ae0ad2994dda7809c7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6bff739-56", "ovs_interfaceid": "d6bff739-5602-402b-8bb2-eb9bb4ab0bd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.214554] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292890, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.64749} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.215106] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c/883a0d5a-f775-4ffc-abf0-921d0ea6cc8c.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 831.215106] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 831.215365] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-09a29d2f-818f-4a1f-a228-c96228d06d82 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.224686] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for the task: (returnval){ [ 831.224686] env[61852]: value = "task-1292893" [ 831.224686] env[61852]: _type = "Task" [ 831.224686] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.237789] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292893, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.263236] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292891, 'name': PowerOnVM_Task, 'duration_secs': 0.753914} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.263597] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 831.263808] env[61852]: INFO nova.compute.manager [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Took 10.54 seconds to spawn the instance on the hypervisor. 
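---
Annotation: the recurring `Task: {'id': task-..., 'name': ...} progress is N%.` lines above are produced while nova polls vCenter tasks through oslo_vmware's `wait_for_task` (`oslo_vmware/api.py:397` / `_poll_task` at `api.py:434`), re-reading task info until the task reaches a terminal state. Below is a minimal, self-contained sketch of that polling pattern, not the oslo.vmware implementation; `TaskInfo`, `get_task_info`, and `FakeCopyDiskTask` are hypothetical stand-ins for the vSphere task objects seen in the log.

```python
import time
from dataclasses import dataclass
from typing import Callable, Optional


@dataclass
class TaskInfo:
    # Hypothetical mirror of the fields visible in the log output.
    task_id: str
    name: str
    state: str                    # 'running' | 'success' | 'error'
    progress: int = 0
    error: Optional[str] = None


def wait_for_task(get_task_info: Callable[[], TaskInfo],
                  poll_interval: float = 0.5) -> TaskInfo:
    """Poll until the task reaches a terminal state, logging progress."""
    while True:
        info = get_task_info()
        # Matches the shape of the "_poll_task" DEBUG lines above.
        print(f"Task: {{'id': '{info.task_id}', 'name': '{info.name}'}} "
              f"progress is {info.progress}%.")
        if info.state == 'success':
            return info
        if info.state == 'error':
            raise RuntimeError(f"{info.name} failed: {info.error}")
        time.sleep(poll_interval)


class FakeCopyDiskTask:
    """Hypothetical task that reports 0% -> 50% -> success, like task-1292890."""

    def __init__(self) -> None:
        self._polls = 0

    def __call__(self) -> TaskInfo:
        self._polls += 1
        if self._polls >= 3:
            return TaskInfo('task-1292890', 'CopyVirtualDisk_Task',
                            'success', progress=100)
        return TaskInfo('task-1292890', 'CopyVirtualDisk_Task',
                        'running', progress=50 * (self._polls - 1))


wait_for_task(FakeCopyDiskTask(), poll_interval=0.01)
```

The real loop additionally translates vSphere faults into oslo_vmware exceptions and runs inside a looping call; the sketch keeps only the poll/log/return skeleton that the DEBUG lines expose.
---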
[ 831.264425] env[61852]: DEBUG nova.compute.manager [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 831.265283] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a63001-6c68-4923-9cf8-342f14ed1da9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.332472] env[61852]: DEBUG nova.network.neutron [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Updating instance_info_cache with network_info: [{"id": "ea60304c-08b8-4035-8ece-fc40b1b508b1", "address": "fa:16:3e:8c:e4:12", "network": {"id": "240e5d63-b796-4cef-9d1f-5d8f8868dea4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1472329620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdac3605118e44a69d44ab56cafe2e21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea60304c-08", "ovs_interfaceid": "ea60304c-08b8-4035-8ece-fc40b1b508b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.382118] env[61852]: DEBUG nova.network.neutron [-] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.388030] env[61852]: DEBUG nova.network.neutron [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Successfully created port: b3f3d9b5-9c27-4415-b02c-58c0b1133727 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 831.408666] env[61852]: DEBUG oslo_vmware.api [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52f82292-7a56-9b40-79e2-a49d5a3065d7, 'name': SearchDatastore_Task, 'duration_secs': 0.015156} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.409198] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.409562] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 831.409920] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 831.410106] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.410328] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 831.410682] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8fc95d77-15f7-47cb-a5c4-91b55dfe7254 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.423273] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 831.423589] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 831.424471] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a25e625-daeb-4406-bda5-134f9ded97ae {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.429994] env[61852]: DEBUG oslo_vmware.api [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 831.429994] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52a82f19-c0a9-a077-7667-6c17588f5084" [ 831.429994] env[61852]: _type = "Task" [ 831.429994] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.439787] env[61852]: DEBUG oslo_vmware.api [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52a82f19-c0a9-a077-7667-6c17588f5084, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.620932] env[61852]: DEBUG nova.compute.utils [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 831.624723] env[61852]: DEBUG nova.compute.manager [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 831.624950] env[61852]: DEBUG nova.network.neutron [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 831.684787] env[61852]: DEBUG oslo_concurrency.lockutils [req-9de5911f-1312-400d-81a5-afc96d88f3ed req-260681aa-ad80-41cc-90c6-2930c5352d79 service nova] Releasing lock "refresh_cache-b0f8f7dd-e559-43be-b541-c3da48a07d68" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.685150] env[61852]: DEBUG nova.compute.manager [req-9de5911f-1312-400d-81a5-afc96d88f3ed req-260681aa-ad80-41cc-90c6-2930c5352d79 service nova] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Received event network-vif-plugged-ea60304c-08b8-4035-8ece-fc40b1b508b1 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 831.685418] env[61852]: DEBUG oslo_concurrency.lockutils [req-9de5911f-1312-400d-81a5-afc96d88f3ed req-260681aa-ad80-41cc-90c6-2930c5352d79 service nova] Acquiring lock "00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.685656] env[61852]: DEBUG oslo_concurrency.lockutils [req-9de5911f-1312-400d-81a5-afc96d88f3ed req-260681aa-ad80-41cc-90c6-2930c5352d79 service nova] Lock "00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.685900] env[61852]: DEBUG oslo_concurrency.lockutils [req-9de5911f-1312-400d-81a5-afc96d88f3ed req-260681aa-ad80-41cc-90c6-2930c5352d79 service nova] Lock "00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.686105] env[61852]: DEBUG nova.compute.manager [req-9de5911f-1312-400d-81a5-afc96d88f3ed req-260681aa-ad80-41cc-90c6-2930c5352d79 service nova] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] No waiting events found dispatching network-vif-plugged-ea60304c-08b8-4035-8ece-fc40b1b508b1 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 831.686283] env[61852]: WARNING nova.compute.manager [req-9de5911f-1312-400d-81a5-afc96d88f3ed req-260681aa-ad80-41cc-90c6-2930c5352d79 service nova] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Received unexpected event network-vif-plugged-ea60304c-08b8-4035-8ece-fc40b1b508b1 for instance with vm_state building and task_state spawning. 
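---
Annotation: the `network-vif-plugged` / `network-changed` traffic above shows nova's external-event handshake. Before an operation that must wait on Neutron, the compute manager registers the events it expects; incoming notifications are then matched against that registry under the per-instance `*-events` lock (`nova.compute.manager.InstanceEvents.pop_instance_event` in the log). When no waiter is registered, as here during spawn, the event is logged as unexpected and dropped. The following is a stripped-down sketch of that bookkeeping; the `InstanceEvents` class below is a hypothetical simplification, not nova's implementation.

```python
import threading
from collections import defaultdict


class InstanceEvents:
    """Hypothetical simplification of nova's expected-event registry."""

    def __init__(self) -> None:
        self._lock = threading.Lock()
        # {instance_uuid: {event_name: threading.Event}}
        self._events = defaultdict(dict)

    def prepare(self, instance_uuid: str, event_name: str) -> threading.Event:
        """Register interest in an event before kicking off the operation."""
        with self._lock:
            ev = threading.Event()
            self._events[instance_uuid][event_name] = ev
            return ev

    def pop(self, instance_uuid: str, event_name: str) -> bool:
        """Deliver an external event; returns True if a waiter was signalled."""
        with self._lock:
            ev = self._events.get(instance_uuid, {}).pop(event_name, None)
        if ev is None:
            # Mirrors the WARNING above: nobody was waiting for this event.
            print(f"WARNING: Received unexpected event {event_name} "
                  f"for instance {instance_uuid}")
            return False
        ev.set()
        return True


# Usage, reusing identifiers from the log for illustration only.
events = InstanceEvents()
uuid = '00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9'
port_event = 'network-vif-plugged-ea60304c-08b8-4035-8ece-fc40b1b508b1'
waiter = events.prepare(uuid, port_event)
events.pop(uuid, port_event)      # waiter is signalled
assert waiter.wait(timeout=1.0)
events.pop(uuid, port_event)      # no waiter left: warning path, as in the log
```
---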
[ 831.686483] env[61852]: DEBUG nova.compute.manager [req-9de5911f-1312-400d-81a5-afc96d88f3ed req-260681aa-ad80-41cc-90c6-2930c5352d79 service nova] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Received event network-changed-ea60304c-08b8-4035-8ece-fc40b1b508b1 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 831.686902] env[61852]: DEBUG nova.compute.manager [req-9de5911f-1312-400d-81a5-afc96d88f3ed req-260681aa-ad80-41cc-90c6-2930c5352d79 service nova] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Refreshing instance network info cache due to event network-changed-ea60304c-08b8-4035-8ece-fc40b1b508b1. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 831.686902] env[61852]: DEBUG oslo_concurrency.lockutils [req-9de5911f-1312-400d-81a5-afc96d88f3ed req-260681aa-ad80-41cc-90c6-2930c5352d79 service nova] Acquiring lock "refresh_cache-00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 831.706188] env[61852]: DEBUG nova.policy [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd1349b8262e345068742af657fa8cbd0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4dbb543c66364861bf5f437c8c33a550', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 831.737177] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292893, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.14345} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.737177] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 831.737177] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29fa1c2e-c526-4ece-b302-7ee652fc707a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.763225] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c/883a0d5a-f775-4ffc-abf0-921d0ea6cc8c.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 831.764092] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-26b0a905-9830-472d-ae16-35d89a4e4d81 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.796886] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for the task: (returnval){ [ 831.796886] env[61852]: value = "task-1292894" [ 831.796886] env[61852]: _type = "Task" [ 831.796886] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.797550] env[61852]: INFO nova.compute.manager [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Took 37.52 seconds to build instance. [ 831.809787] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292894, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.837423] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Releasing lock "refresh_cache-00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.837762] env[61852]: DEBUG nova.compute.manager [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Instance network_info: |[{"id": "ea60304c-08b8-4035-8ece-fc40b1b508b1", "address": "fa:16:3e:8c:e4:12", "network": {"id": "240e5d63-b796-4cef-9d1f-5d8f8868dea4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1472329620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdac3605118e44a69d44ab56cafe2e21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea60304c-08", "ovs_interfaceid": "ea60304c-08b8-4035-8ece-fc40b1b508b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 831.838462] env[61852]: DEBUG oslo_concurrency.lockutils [req-9de5911f-1312-400d-81a5-afc96d88f3ed req-260681aa-ad80-41cc-90c6-2930c5352d79 service nova] Acquired lock "refresh_cache-00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.838462] env[61852]: DEBUG nova.network.neutron [req-9de5911f-1312-400d-81a5-afc96d88f3ed req-260681aa-ad80-41cc-90c6-2930c5352d79 service nova] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Refreshing network info cache for port ea60304c-08b8-4035-8ece-fc40b1b508b1 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 831.839550] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8c:e4:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cd5d325-3053-407e-a4ee-f627e82a23f9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ea60304c-08b8-4035-8ece-fc40b1b508b1', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 831.852427] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] 
Creating folder: Project (bdac3605118e44a69d44ab56cafe2e21). Parent ref: group-v277280. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 831.856246] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7f681246-54d9-4e96-8062-f9c4cc552fcc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.870489] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Created folder: Project (bdac3605118e44a69d44ab56cafe2e21) in parent group-v277280. [ 831.870489] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Creating folder: Instances. Parent ref: group-v277346. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 831.870489] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a64e70fd-8453-4f09-8444-79d8efb0a507 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.877138] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Created folder: Instances in parent group-v277346. [ 831.877479] env[61852]: DEBUG oslo.service.loopingcall [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 831.877768] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 831.877999] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0c24cb60-0f7a-49bb-91cd-262d5fbfa245 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.895675] env[61852]: INFO nova.compute.manager [-] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Took 1.94 seconds to deallocate network for instance. [ 831.904388] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 831.904388] env[61852]: value = "task-1292897" [ 831.904388] env[61852]: _type = "Task" [ 831.904388] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.915436] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292897, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.940438] env[61852]: DEBUG oslo_vmware.api [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52a82f19-c0a9-a077-7667-6c17588f5084, 'name': SearchDatastore_Task, 'duration_secs': 0.02468} completed successfully. 
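The pair of Folder.CreateFolder invocations above (a "Project (<tenant id>)" folder under group-v277280, then an "Instances" folder inside it) can be sketched with pyVmomi, which is an assumption here; the driver actually issues these calls through oslo.vmware's session layer:

```python
from pyVmomi import vim

def ensure_child_folder(parent_folder, name):
    # CreateFolder raises vim.fault.DuplicateName if the child already
    # exists, so check the children first to keep the call idempotent.
    for child in parent_folder.childEntity:
        if isinstance(child, vim.Folder) and child.name == name:
            return child
    return parent_folder.CreateFolder(name=name)

# usage sketch:
# project = ensure_child_folder(vm_folder,
#     'Project (bdac3605118e44a69d44ab56cafe2e21)')
# instances = ensure_child_folder(project, 'Instances')
```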
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.944566] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99f95bcd-0cd6-4438-be85-cde9d0320e3f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.951249] env[61852]: DEBUG oslo_vmware.api [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 831.951249] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52cb4c08-ebb3-ff0e-8d6d-dc6ec74a8cf3" [ 831.951249] env[61852]: _type = "Task" [ 831.951249] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.960061] env[61852]: DEBUG oslo_vmware.api [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52cb4c08-ebb3-ff0e-8d6d-dc6ec74a8cf3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.094494] env[61852]: DEBUG nova.network.neutron [req-9de5911f-1312-400d-81a5-afc96d88f3ed req-260681aa-ad80-41cc-90c6-2930c5352d79 service nova] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Updated VIF entry in instance network info cache for port ea60304c-08b8-4035-8ece-fc40b1b508b1. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 832.095127] env[61852]: DEBUG nova.network.neutron [req-9de5911f-1312-400d-81a5-afc96d88f3ed req-260681aa-ad80-41cc-90c6-2930c5352d79 service nova] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Updating instance_info_cache with network_info: [{"id": "ea60304c-08b8-4035-8ece-fc40b1b508b1", "address": "fa:16:3e:8c:e4:12", "network": {"id": "240e5d63-b796-4cef-9d1f-5d8f8868dea4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1472329620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdac3605118e44a69d44ab56cafe2e21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea60304c-08", "ovs_interfaceid": "ea60304c-08b8-4035-8ece-fc40b1b508b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.128267] env[61852]: DEBUG nova.compute.manager [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Start building block device mappings for instance. 
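The "Updated VIF entry in instance network info cache for port ..." entry is, at heart, a replace-by-id over the cached list of VIF dicts shown above. An illustrative helper over plain dicts; Nova wraps these in its network model classes:

```python
def update_vif_entry(nw_info, new_vif):
    """Replace (or append) one VIF dict in a cached network_info list.

    nw_info is shaped like the JSON logged above: a list of dicts keyed
    by 'id', e.g. 'ea60304c-08b8-4035-8ece-fc40b1b508b1'.
    """
    for i, vif in enumerate(nw_info):
        if vif['id'] == new_vif['id']:
            nw_info[i] = new_vif     # "Updated VIF entry ..."
            return nw_info
    nw_info.append(new_vif)
    return nw_info
```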
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 832.173266] env[61852]: DEBUG nova.network.neutron [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Successfully created port: 17a779c7-0b48-479d-88e1-f5dc1ec4eab9 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 832.307879] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lock "f8ebb1b7-39c6-486e-ab25-23080d858846" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 105.023s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.313381] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292894, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.402845] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.418597] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292897, 'name': CreateVM_Task, 'duration_secs': 0.478332} completed successfully. 
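Every lockutils entry above carries "waited X.XXXs" / "held X.XXXs" decorations, e.g. the build lock held 105.023s. A sketch of that bookkeeping around any context-manager lock; oslo.concurrency does the equivalent inside its own wrapper:

```python
import time
from contextlib import contextmanager

@contextmanager
def timed_lock(lock, name):
    t0 = time.monotonic()
    with lock:
        waited = time.monotonic() - t0
        print('Lock "%s" acquired :: waited %.3fs' % (name, waited))
        t1 = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - t1
            print('Lock "%s" released :: held %.3fs' % (name, held))
```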
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.418953] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 832.419730] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.420211] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.420812] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 832.421284] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56444cff-c14e-4fe1-b117-938e4667d054 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.433021] env[61852]: DEBUG oslo_vmware.api [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 832.433021] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52630d65-5b39-8491-5e07-f1f354f778bb" [ 832.433021] env[61852]: _type = "Task" [ 832.433021] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.443412] env[61852]: DEBUG oslo_vmware.api [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52630d65-5b39-8491-5e07-f1f354f778bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.461246] env[61852]: DEBUG oslo_vmware.api [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52cb4c08-ebb3-ff0e-8d6d-dc6ec74a8cf3, 'name': SearchDatastore_Task, 'duration_secs': 0.015693} completed successfully. 
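The Acquiring/Acquired pair on "[datastore2] devstack-image-cache_base/<image id>" above guards the per-datastore image cache: the cache path doubles as the lock name, so two boots of the same image serialize instead of fetching it twice. A sketch with hypothetical exists_fn/fetch_fn callables; the external semaphore in the log adds cross-process protection that this sketch omits:

```python
from oslo_concurrency import lockutils

def get_cached_image(image_id, datastore, exists_fn, fetch_fn):
    cache_path = '[%s] devstack-image-cache_base/%s' % (datastore, image_id)
    with lockutils.lock(cache_path):
        if not exists_fn(cache_path):
            fetch_fn(image_id, cache_path)   # only the first boot downloads
        return cache_path
```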
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.462180] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.462552] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] b0f8f7dd-e559-43be-b541-c3da48a07d68/b0f8f7dd-e559-43be-b541-c3da48a07d68.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 832.463012] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-987581ed-aec4-429b-93fd-bf9bd75cf3ab {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.473183] env[61852]: DEBUG oslo_vmware.api [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 832.473183] env[61852]: value = "task-1292898" [ 832.473183] env[61852]: _type = "Task" [ 832.473183] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.481812] env[61852]: DEBUG oslo_vmware.api [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292898, 'name': CopyVirtualDisk_Task} progress is 0%. 
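The CopyVirtualDisk_Task above clones the cached image vmdk into the instance's own directory on the same datastore. A hedged pyVmomi sketch with illustrative path building; the driver composes these paths through its ds_util helpers:

```python
from pyVim.task import WaitForTask

def clone_root_disk(content, datacenter, image_id, instance_uuid,
                    ds='datastore2'):
    src = '[%s] devstack-image-cache_base/%s/%s.vmdk' % (ds, image_id, image_id)
    dst = '[%s] %s/%s.vmdk' % (ds, instance_uuid, instance_uuid)
    task = content.virtualDiskManager.CopyVirtualDisk_Task(
        sourceName=src, sourceDatacenter=datacenter,
        destName=dst, destDatacenter=datacenter)
    WaitForTask(task)   # the log shows progress 0% -> 51% -> completed
    return dst
```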
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.508249] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e549af5c-b8c3-47e8-a3d7-b507ed87a178 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.515920] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b869228-3694-4b04-8ab4-92db330c9cde {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.548463] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d21117aa-515d-43f8-a9e2-fdbfdae1a3c6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.557987] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-579f0fa9-b483-4b23-8bb8-816dbd613155 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.574917] env[61852]: DEBUG nova.compute.provider_tree [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 832.598200] env[61852]: DEBUG oslo_concurrency.lockutils [req-9de5911f-1312-400d-81a5-afc96d88f3ed req-260681aa-ad80-41cc-90c6-2930c5352d79 service nova] Releasing lock "refresh_cache-00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.809031] env[61852]: DEBUG nova.compute.manager [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 832.819448] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292894, 'name': ReconfigVM_Task, 'duration_secs': 0.579414} completed successfully. 
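"Inventory has not changed in ProviderTree for provider ..." means the report client compared the freshly computed inventory against its cached copy and skipped the placement update. The decision reduces to deep dict equality over the per-resource-class fields:

```python
def inventory_changed(cached, new):
    # Nested dict equality covers total/reserved/min_unit/max_unit/
    # step_size/allocation_ratio for each resource class.
    return cached != new

cached = {'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0}}
assert not inventory_changed(cached, {'VCPU': dict(cached['VCPU'])})
```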
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.819961] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Reconfigured VM instance instance-00000041 to attach disk [datastore2] 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c/883a0d5a-f775-4ffc-abf0-921d0ea6cc8c.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 832.821067] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6d2f7830-f1c4-4572-82e8-c29da5624bcb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.830159] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for the task: (returnval){ [ 832.830159] env[61852]: value = "task-1292899" [ 832.830159] env[61852]: _type = "Task" [ 832.830159] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.841179] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292899, 'name': Rename_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.922504] env[61852]: DEBUG nova.network.neutron [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Successfully updated port: b3f3d9b5-9c27-4415-b02c-58c0b1133727 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 832.933652] env[61852]: DEBUG nova.compute.manager [req-b38a768d-a7a3-4cf7-8f71-7a94c10d7c6d req-79591121-00a2-4531-8d39-3daaad72b8b6 service nova] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Received event network-vif-deleted-8dd44cd3-3b97-484e-b3ed-ddb88f224343 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 832.934239] env[61852]: DEBUG nova.compute.manager [req-b38a768d-a7a3-4cf7-8f71-7a94c10d7c6d req-79591121-00a2-4531-8d39-3daaad72b8b6 service nova] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Received event network-vif-deleted-819604bb-f7cf-449f-8681-bf4901e756e1 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 832.934670] env[61852]: DEBUG nova.compute.manager [req-b38a768d-a7a3-4cf7-8f71-7a94c10d7c6d req-79591121-00a2-4531-8d39-3daaad72b8b6 service nova] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Received event network-vif-deleted-92411dba-21d2-474e-9b4f-cda4bea94122 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 832.949273] env[61852]: DEBUG oslo_vmware.api [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52630d65-5b39-8491-5e07-f1f354f778bb, 'name': SearchDatastore_Task, 'duration_secs': 0.017561} completed 
successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.949746] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.950009] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 832.950291] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.950474] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.950629] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 832.951056] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-facc9a56-1fc3-4d0e-b9e7-2ce32dad93d5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.969028] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 832.969028] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Folder [datastore2] devstack-image-cache_base created. 
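The mkdir sequence above (Creating directory ... Created directory ... Folder created) must tolerate a cache directory that already exists. A pyVmomi-flavored sketch, an assumption here since the driver calls FileManager.MakeDirectory through its session:

```python
from pyVmomi import vim

def mkdir_if_missing(file_manager, datacenter, path):
    try:
        file_manager.MakeDirectory(name=path, datacenter=datacenter,
                                   createParentDirectories=True)
    except vim.fault.FileAlreadyExists:
        pass  # another request created it first; that is fine

# usage sketch: mkdir_if_missing(content.fileManager, dc,
#                                '[datastore2] devstack-image-cache_base')
```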
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 832.969028] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca045cce-256e-498a-a0da-4265089ae0a9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.975574] env[61852]: DEBUG oslo_vmware.api [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 832.975574] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5215f416-b044-dac5-9234-8b61088ef451" [ 832.975574] env[61852]: _type = "Task" [ 832.975574] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.987567] env[61852]: DEBUG oslo_vmware.api [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5215f416-b044-dac5-9234-8b61088ef451, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.990846] env[61852]: DEBUG oslo_vmware.api [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292898, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.078721] env[61852]: DEBUG nova.scheduler.client.report [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 833.137156] env[61852]: DEBUG nova.compute.manager [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Start spawning the instance on the hypervisor. 
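The inventory payload above implies the schedulable capacity directly: placement treats (total - reserved) * allocation_ratio as usable, so this host offers 48 * 4.0 = 192 VCPUs and (196590 - 512) MB of RAM. A worked check:

```python
def effective_capacity(inv):
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

inv = {'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
       'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
       'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0}}
print(effective_capacity(inv))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
```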
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 833.159535] env[61852]: DEBUG nova.virt.hardware [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 833.160244] env[61852]: DEBUG nova.virt.hardware [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 833.160244] env[61852]: DEBUG nova.virt.hardware [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 833.160328] env[61852]: DEBUG nova.virt.hardware [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 833.160420] env[61852]: DEBUG nova.virt.hardware [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 833.160578] env[61852]: DEBUG nova.virt.hardware [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 833.160854] env[61852]: DEBUG nova.virt.hardware [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 833.161118] env[61852]: DEBUG nova.virt.hardware [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 833.161332] env[61852]: DEBUG nova.virt.hardware [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] 
Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 833.161509] env[61852]: DEBUG nova.virt.hardware [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 833.161707] env[61852]: DEBUG nova.virt.hardware [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 833.162647] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfd260ef-56a2-4f4f-b779-d078e24e6272 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.230444] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d46e9636-eaae-4f10-9e8e-00edaf3dc5c7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.310816] env[61852]: DEBUG nova.compute.manager [req-a135200e-a399-4114-890b-8543d61f7423 req-21c5aec1-2874-4640-a8d4-1f7475f1879b service nova] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Received event network-changed-72d13320-e518-4f1a-98b0-cb48bcb2fe11 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 833.312035] env[61852]: DEBUG nova.compute.manager [req-a135200e-a399-4114-890b-8543d61f7423 req-21c5aec1-2874-4640-a8d4-1f7475f1879b service nova] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Refreshing instance network info cache due to event network-changed-72d13320-e518-4f1a-98b0-cb48bcb2fe11. 
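The topology walk above (limits 65536:65536:65536, one vCPU, exactly one possible topology) enumerates sockets/cores/threads triples whose product equals the vCPU count. A toy equivalent of the hardware.py computation; for the 1-vCPU m1.nano flavor the only result is (1, 1, 1):

```python
import itertools

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    topos = []
    for s, c, t in itertools.product(range(1, vcpus + 1), repeat=3):
        if (s * c * t == vcpus and s <= max_sockets
                and c <= max_cores and t <= max_threads):
            topos.append((s, c, t))
    return topos

print(possible_topologies(1))  # [(1, 1, 1)]
```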
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 833.312035] env[61852]: DEBUG oslo_concurrency.lockutils [req-a135200e-a399-4114-890b-8543d61f7423 req-21c5aec1-2874-4640-a8d4-1f7475f1879b service nova] Acquiring lock "refresh_cache-0ec1210f-7d42-4b71-abdc-9f818ffb91ea" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.312035] env[61852]: DEBUG oslo_concurrency.lockutils [req-a135200e-a399-4114-890b-8543d61f7423 req-21c5aec1-2874-4640-a8d4-1f7475f1879b service nova] Acquired lock "refresh_cache-0ec1210f-7d42-4b71-abdc-9f818ffb91ea" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.312035] env[61852]: DEBUG nova.network.neutron [req-a135200e-a399-4114-890b-8543d61f7423 req-21c5aec1-2874-4640-a8d4-1f7475f1879b service nova] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Refreshing network info cache for port 72d13320-e518-4f1a-98b0-cb48bcb2fe11 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 833.331590] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.340547] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292899, 'name': Rename_Task, 'duration_secs': 0.295443} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.340834] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 833.341100] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e8292ba2-ee39-40e6-a448-b5f2244d299d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.347528] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for the task: (returnval){ [ 833.347528] env[61852]: value = "task-1292900" [ 833.347528] env[61852]: _type = "Task" [ 833.347528] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.356380] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292900, 'name': PowerOnVM_Task} progress is 0%. 
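After Rename_Task completes, the VM is powered on (task-1292900, polled below at 0%, then 89%, then done). With pyVmomi, assumed here in place of the driver's session layer, the same call pair is:

```python
from pyVim.task import WaitForTask

def power_on(vm):
    task = vm.PowerOnVM_Task()   # returns a vim.Task, like task-1292900
    WaitForTask(task)            # blocks until success, raises on error
```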
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.425219] env[61852]: DEBUG oslo_concurrency.lockutils [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "refresh_cache-d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.425492] env[61852]: DEBUG oslo_concurrency.lockutils [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquired lock "refresh_cache-d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.425763] env[61852]: DEBUG nova.network.neutron [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 833.486577] env[61852]: DEBUG oslo_vmware.api [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292898, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.701468} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.487312] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] b0f8f7dd-e559-43be-b541-c3da48a07d68/b0f8f7dd-e559-43be-b541-c3da48a07d68.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 833.487556] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 833.487816] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4630b098-b057-44a0-8bd0-a938c0b0b755 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.493276] env[61852]: DEBUG oslo_vmware.api [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5215f416-b044-dac5-9234-8b61088ef451, 'name': SearchDatastore_Task, 'duration_secs': 0.064129} completed successfully. 
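"Extending root virtual disk to 1048576" is expressed in KB: the flavor's root_gb=1 converts to 1 * 1024 * 1024 = 1048576 KB. A hedged pyVmomi sketch of the extend call seen above:

```python
from pyVim.task import WaitForTask

def extend_root_disk(virtual_disk_manager, datacenter, vmdk_path, root_gb):
    new_kb = root_gb * 1024 * 1024            # 1 GB -> 1048576 KB
    task = virtual_disk_manager.ExtendVirtualDisk_Task(
        name=vmdk_path, datacenter=datacenter,
        newCapacityKb=new_kb, eagerZero=False)
    WaitForTask(task)
```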
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.494636] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19685d35-bd9d-47b4-979e-9e53ce42503e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.498488] env[61852]: DEBUG oslo_vmware.api [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 833.498488] env[61852]: value = "task-1292901" [ 833.498488] env[61852]: _type = "Task" [ 833.498488] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.503748] env[61852]: DEBUG oslo_vmware.api [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 833.503748] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52cffad1-3718-1498-1237-7d96aaeeae2b" [ 833.503748] env[61852]: _type = "Task" [ 833.503748] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.510869] env[61852]: DEBUG oslo_vmware.api [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292901, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.516143] env[61852]: DEBUG oslo_vmware.api [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52cffad1-3718-1498-1237-7d96aaeeae2b, 'name': SearchDatastore_Task, 'duration_secs': 0.010999} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.516347] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.516629] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9/00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 833.516896] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cfb6df01-c79a-4b42-b9e5-798b108ff17c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.523881] env[61852]: DEBUG oslo_vmware.api [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 833.523881] env[61852]: value = "task-1292902" [ 833.523881] env[61852]: _type = "Task" [ 833.523881] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.533336] env[61852]: DEBUG oslo_vmware.api [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1292902, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.584589] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.474s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.585229] env[61852]: DEBUG nova.compute.manager [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Start building networks asynchronously for instance. 
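"Start building networks asynchronously for instance" means the Neutron allocation runs in the background while block-device mappings are built, and its result is awaited only when actually needed. Nova does this with eventlet greenthreads; a thread-pool future stands in for that here:

```python
from concurrent.futures import ThreadPoolExecutor

def build_resources(instance, allocate_networks, build_block_devices):
    with ThreadPoolExecutor(max_workers=1) as pool:
        nw_future = pool.submit(allocate_networks, instance)  # background
        bdms = build_block_devices(instance)                  # foreground
        return nw_future.result(), bdms   # join when network info is needed
```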
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 833.588188] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.315s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.588382] env[61852]: DEBUG nova.objects.instance [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Lazy-loading 'resources' on Instance uuid b0d38886-aacb-4b7e-9530-c5891d9cee66 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 833.862916] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292900, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.949353] env[61852]: DEBUG nova.network.neutron [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Successfully updated port: 17a779c7-0b48-479d-88e1-f5dc1ec4eab9 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 833.987040] env[61852]: WARNING nova.network.neutron [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] d984a6fb-5f5f-4678-bc8a-3723c26f290a already exists in list: networks containing: ['d984a6fb-5f5f-4678-bc8a-3723c26f290a']. ignoring it [ 834.009131] env[61852]: DEBUG oslo_vmware.api [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292901, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090664} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.009469] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 834.010319] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a58c2841-105c-4ad9-8949-f7ca18f7d85e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.036990] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] b0f8f7dd-e559-43be-b541-c3da48a07d68/b0f8f7dd-e559-43be-b541-c3da48a07d68.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 834.041936] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a3eec64-3307-4e0e-a30c-758978166e4d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.069287] env[61852]: DEBUG oslo_vmware.api [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1292902, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.070892] env[61852]: DEBUG oslo_vmware.api [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 834.070892] env[61852]: value = "task-1292903" [ 834.070892] env[61852]: _type = "Task" [ 834.070892] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.082694] env[61852]: DEBUG oslo_vmware.api [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292903, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.091697] env[61852]: DEBUG nova.compute.utils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 834.098913] env[61852]: DEBUG nova.compute.manager [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 834.098913] env[61852]: DEBUG nova.network.neutron [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 834.154953] env[61852]: DEBUG nova.policy [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '52bf0aad002740e28da26a9e1d6b14da', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '856d91d948e84ab69536db1faebf54ee', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 834.207604] env[61852]: DEBUG nova.network.neutron [req-a135200e-a399-4114-890b-8543d61f7423 req-21c5aec1-2874-4640-a8d4-1f7475f1879b service nova] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Updated VIF entry in instance network info cache for port 72d13320-e518-4f1a-98b0-cb48bcb2fe11. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 834.208012] env[61852]: DEBUG nova.network.neutron [req-a135200e-a399-4114-890b-8543d61f7423 req-21c5aec1-2874-4640-a8d4-1f7475f1879b service nova] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Updating instance_info_cache with network_info: [{"id": "72d13320-e518-4f1a-98b0-cb48bcb2fe11", "address": "fa:16:3e:74:13:9d", "network": {"id": "dc45adde-e4fb-4495-a4c3-3373c99a2eb7", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1925353850-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d65efc960c14799bcf1b26ecdf9c912", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72d13320-e5", "ovs_interfaceid": "72d13320-e518-4f1a-98b0-cb48bcb2fe11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.225491] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Acquiring lock "0ec1210f-7d42-4b71-abdc-9f818ffb91ea" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.225750] 
env[61852]: DEBUG oslo_concurrency.lockutils [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Lock "0ec1210f-7d42-4b71-abdc-9f818ffb91ea" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.228098] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Acquiring lock "0ec1210f-7d42-4b71-abdc-9f818ffb91ea-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.228098] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Lock "0ec1210f-7d42-4b71-abdc-9f818ffb91ea-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.228098] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Lock "0ec1210f-7d42-4b71-abdc-9f818ffb91ea-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.229376] env[61852]: INFO nova.compute.manager [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Terminating instance [ 834.230655] env[61852]: DEBUG nova.compute.manager [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Start destroying the instance on the hypervisor. 
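The terminate sequence above takes two locks: the instance-wide lock "<uuid>" around the whole do_terminate_instance, and a short-lived "<uuid>-events" lock (held 0.000s) just to clear pending external events. A sketch of that ordering with hypothetical clear_events/do_terminate callables:

```python
from oslo_concurrency import lockutils

def terminate_instance(uuid, clear_events, do_terminate):
    with lockutils.lock(uuid):                    # serialize vs. other ops
        with lockutils.lock('%s-events' % uuid):  # guard pending events
            clear_events(uuid)                    # "_clear_events" above
        do_terminate(uuid)                        # "Terminating instance"
```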
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 834.230849] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 834.231773] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc163141-372f-4c5b-8a15-a75c358a321c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.242369] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 834.242670] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1b404520-7f19-473f-9fd5-c7a50ddabed9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.251597] env[61852]: DEBUG oslo_vmware.api [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Waiting for the task: (returnval){ [ 834.251597] env[61852]: value = "task-1292904" [ 834.251597] env[61852]: _type = "Task" [ 834.251597] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.264979] env[61852]: DEBUG oslo_vmware.api [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Task: {'id': task-1292904, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.360610] env[61852]: DEBUG oslo_vmware.api [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292900, 'name': PowerOnVM_Task, 'duration_secs': 0.673845} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.365494] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 834.365757] env[61852]: INFO nova.compute.manager [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Took 11.13 seconds to spawn the instance on the hypervisor. 
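The PowerOffVM_Task sequence above shows the pattern this log repeats for every vSphere operation: the API call returns a task object immediately ('Waiting for the task: (returnval){ value = "task-1292904" ... }'), and oslo.vmware then polls it until it reaches a terminal state, producing the 'progress is N%' and "completed successfully ... 'duration_secs'" records. Below is a minimal sketch of that polling pattern; fetch_task_info is an illustrative stub standing in for the real PropertyCollector read of TaskInfo, and none of this is the oslo.vmware implementation.

    import time

    def fetch_task_info(task_id, _progress={'n': 0}):
        """Stand-in for reading TaskInfo from vCenter; the mutable default
        keeps toy state so each poll advances the task by 50%."""
        _progress['n'] = min(_progress['n'] + 50, 100)
        state = 'success' if _progress['n'] == 100 else 'running'
        return {'state': state, 'progress': _progress['n'], 'error': None}

    def wait_for_task(task_id, poll_interval=0.5):
        """Poll task_id until it is terminal, logging the way the
        '_poll_task ... progress is N%' lines above do."""
        start = time.monotonic()
        while True:
            info = fetch_task_info(task_id)
            if info['state'] == 'success':
                print("Task %s completed successfully in %.3fs"
                      % (task_id, time.monotonic() - start))
                return info
            if info['state'] == 'error':
                raise RuntimeError("Task %s failed: %s"
                                   % (task_id, info['error']))
            print("Task %s progress is %d%%." % (task_id, info['progress']))
            time.sleep(poll_interval)

    wait_for_task('task-1292904')

The fixed-interval poll is why long operations (the 0.673845s PowerOnVM_Task above) show a few intermediate progress records while fast ones jump straight to completion.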
[ 834.365937] env[61852]: DEBUG nova.compute.manager [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 834.367180] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b485f88-ecd0-461d-b9b1-21eb8ece86ae {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.412678] env[61852]: DEBUG nova.network.neutron [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Updating instance_info_cache with network_info: [{"id": "9e5204e6-6870-43d3-986f-9ca080104e14", "address": "fa:16:3e:9f:59:1b", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e5204e6-68", "ovs_interfaceid": "9e5204e6-6870-43d3-986f-9ca080104e14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b3f3d9b5-9c27-4415-b02c-58c0b1133727", "address": "fa:16:3e:ab:83:17", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3f3d9b5-9c", "ovs_interfaceid": "b3f3d9b5-9c27-4415-b02c-58c0b1133727", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.452587] env[61852]: DEBUG oslo_concurrency.lockutils [None 
req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "refresh_cache-d93b8055-1eb2-4368-a051-289dc5a9d0ed" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.452742] env[61852]: DEBUG oslo_concurrency.lockutils [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquired lock "refresh_cache-d93b8055-1eb2-4368-a051-289dc5a9d0ed" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.452904] env[61852]: DEBUG nova.network.neutron [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 834.464276] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cfa4849-6b6e-49bf-9a53-df0375280938 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.475240] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dccc34c-211f-44d1-a1d8-f621c4825243 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.516664] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-374ea99b-e676-4df0-af14-a4dd3aca2c82 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.524636] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fe00ba9-9134-46a1-ad94-00063894e734 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.544314] env[61852]: DEBUG nova.compute.provider_tree [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 834.548416] env[61852]: DEBUG oslo_vmware.api [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1292902, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.586662} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.548416] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9/00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 834.548416] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 834.548416] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f8b89350-c808-4a34-ae7b-3300fb995fcd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.552036] env[61852]: DEBUG nova.network.neutron [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Successfully created port: ebf57cb3-0f32-48ff-a39a-bc6c32d02167 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 834.556352] env[61852]: DEBUG oslo_vmware.api [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 834.556352] env[61852]: value = "task-1292905" [ 834.556352] env[61852]: _type = "Task" [ 834.556352] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.565906] env[61852]: DEBUG oslo_vmware.api [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1292905, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.581550] env[61852]: DEBUG oslo_vmware.api [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292903, 'name': ReconfigVM_Task, 'duration_secs': 0.368746} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.581862] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Reconfigured VM instance instance-00000042 to attach disk [datastore2] b0f8f7dd-e559-43be-b541-c3da48a07d68/b0f8f7dd-e559-43be-b541-c3da48a07d68.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 834.582639] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-356699b8-d867-4fd9-83be-20dc642f18f7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.589428] env[61852]: DEBUG oslo_vmware.api [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 834.589428] env[61852]: value = "task-1292906" [ 834.589428] env[61852]: _type = "Task" [ 834.589428] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.601039] env[61852]: DEBUG nova.compute.manager [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 834.603942] env[61852]: DEBUG oslo_vmware.api [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292906, 'name': Rename_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.710479] env[61852]: DEBUG oslo_concurrency.lockutils [req-a135200e-a399-4114-890b-8543d61f7423 req-21c5aec1-2874-4640-a8d4-1f7475f1879b service nova] Releasing lock "refresh_cache-0ec1210f-7d42-4b71-abdc-9f818ffb91ea" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.766852] env[61852]: DEBUG oslo_vmware.api [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Task: {'id': task-1292904, 'name': PowerOffVM_Task, 'duration_secs': 0.371627} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.767296] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 834.767652] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 834.768167] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-78a3f576-1791-463e-b239-9f71c99eabdb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.889157] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 834.892599] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 834.892890] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Deleting the datastore file [datastore1] 0ec1210f-7d42-4b71-abdc-9f818ffb91ea {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 834.893471] env[61852]: INFO nova.compute.manager [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Took 40.20 seconds to build instance. [ 834.894425] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-462285b3-e309-4eb9-b5e3-225937289881 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.901837] env[61852]: DEBUG oslo_vmware.api [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Waiting for the task: (returnval){ [ 834.901837] env[61852]: value = "task-1292908" [ 834.901837] env[61852]: _type = "Task" [ 834.901837] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.912665] env[61852]: DEBUG oslo_vmware.api [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Task: {'id': task-1292908, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.916445] env[61852]: DEBUG oslo_concurrency.lockutils [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Releasing lock "refresh_cache-d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.917096] env[61852]: DEBUG oslo_concurrency.lockutils [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.917261] env[61852]: DEBUG oslo_concurrency.lockutils [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquired lock "d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.918411] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a68f9cb5-dc69-4706-918a-52b40307d451 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.938633] env[61852]: DEBUG nova.virt.hardware [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 834.938978] env[61852]: DEBUG nova.virt.hardware [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 834.939250] env[61852]: DEBUG nova.virt.hardware [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 834.939566] env[61852]: DEBUG nova.virt.hardware [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 834.939828] env[61852]: DEBUG nova.virt.hardware [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 834.940086] env[61852]: DEBUG nova.virt.hardware [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 834.940430] env[61852]: DEBUG nova.virt.hardware [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 834.943174] env[61852]: DEBUG nova.virt.hardware [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 834.943174] env[61852]: DEBUG nova.virt.hardware [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 834.943174] env[61852]: DEBUG nova.virt.hardware [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 834.943174] env[61852]: DEBUG nova.virt.hardware [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 834.952289] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Reconfiguring VM to attach interface {{(pid=61852) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 834.952762] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b03065f6-6ee4-437a-af86-4531a43fb8da {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.974865] env[61852]: DEBUG oslo_vmware.api [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 834.974865] env[61852]: value = "task-1292909" [ 834.974865] env[61852]: _type = "Task" [ 834.974865] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.983294] env[61852]: DEBUG oslo_vmware.api [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1292909, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.998381] env[61852]: DEBUG nova.network.neutron [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 835.048305] env[61852]: DEBUG nova.scheduler.client.report [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 835.066906] env[61852]: DEBUG oslo_vmware.api [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1292905, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.101222} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.067200] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 835.068042] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab827076-ae60-4512-8d46-0a24ce5d9068 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.091230] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Reconfiguring VM instance instance-00000043 to attach disk [datastore2] 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9/00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 835.094794] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ade36995-2f68-4e6a-81b6-4ec56f537948 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.122551] env[61852]: DEBUG oslo_vmware.api [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292906, 'name': Rename_Task, 'duration_secs': 0.170646} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.124036] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 835.124036] env[61852]: DEBUG oslo_vmware.api [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 835.124036] env[61852]: value = "task-1292910" [ 835.124036] env[61852]: _type = "Task" [ 835.124036] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.124286] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-825cf2e3-eb33-43f5-8542-1c747a796301 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.135169] env[61852]: DEBUG oslo_vmware.api [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1292910, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.136564] env[61852]: DEBUG oslo_vmware.api [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 835.136564] env[61852]: value = "task-1292911" [ 835.136564] env[61852]: _type = "Task" [ 835.136564] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.144852] env[61852]: DEBUG oslo_vmware.api [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292911, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.185076] env[61852]: DEBUG nova.compute.manager [req-ebaa13aa-44a2-4e7e-a310-a9cf0a2acb1a req-ee242e31-ac65-43e2-8745-d779956b14db service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Received event network-vif-plugged-b3f3d9b5-9c27-4415-b02c-58c0b1133727 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 835.185366] env[61852]: DEBUG oslo_concurrency.lockutils [req-ebaa13aa-44a2-4e7e-a310-a9cf0a2acb1a req-ee242e31-ac65-43e2-8745-d779956b14db service nova] Acquiring lock "d3922357-383f-4f7e-9c76-4eb688a092b9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.185591] env[61852]: DEBUG oslo_concurrency.lockutils [req-ebaa13aa-44a2-4e7e-a310-a9cf0a2acb1a req-ee242e31-ac65-43e2-8745-d779956b14db service nova] Lock "d3922357-383f-4f7e-9c76-4eb688a092b9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.185825] env[61852]: DEBUG oslo_concurrency.lockutils [req-ebaa13aa-44a2-4e7e-a310-a9cf0a2acb1a req-ee242e31-ac65-43e2-8745-d779956b14db service nova] Lock "d3922357-383f-4f7e-9c76-4eb688a092b9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.185981] env[61852]: DEBUG nova.compute.manager [req-ebaa13aa-44a2-4e7e-a310-a9cf0a2acb1a req-ee242e31-ac65-43e2-8745-d779956b14db service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] No waiting events found dispatching network-vif-plugged-b3f3d9b5-9c27-4415-b02c-58c0b1133727 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 835.186161] env[61852]: WARNING nova.compute.manager [req-ebaa13aa-44a2-4e7e-a310-a9cf0a2acb1a req-ee242e31-ac65-43e2-8745-d779956b14db service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Received unexpected event network-vif-plugged-b3f3d9b5-9c27-4415-b02c-58c0b1133727 for instance with vm_state active and task_state None.
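The 'network-vif-plugged' records above are nova-compute's external-event handshake with neutron: an operation that expects the event registers a waiter before plugging the VIF, and the incoming notification pops and signals that waiter; when nothing is registered, as here where the event arrives for an already-active instance, it is logged as unexpected and dropped. A minimal sketch of that prepare/pop registry pattern, with illustrative names rather than nova's actual classes:

    import threading

    class InstanceEvents:
        """Sketch of a prepare/pop event registry (not nova's code)."""
        def __init__(self):
            self._lock = threading.Lock()
            self._events = {}  # (instance_uuid, event_key) -> threading.Event

        def prepare_for_event(self, instance_uuid, event_key):
            """Register a waiter before starting the operation that
            will eventually trigger the event."""
            waiter = threading.Event()
            with self._lock:
                self._events[(instance_uuid, event_key)] = waiter
            return waiter

        def pop_instance_event(self, instance_uuid, event_key):
            """Called by the event callback; None means nobody waited."""
            with self._lock:
                return self._events.pop((instance_uuid, event_key), None)

    events = InstanceEvents()
    uuid = 'd3922357-383f-4f7e-9c76-4eb688a092b9'

    # Normal build path: the waiter is registered first, the event pops it.
    waiter = events.prepare_for_event(uuid, 'network-vif-plugged-9e5204e6')
    events.pop_instance_event(uuid, 'network-vif-plugged-9e5204e6').set()
    waiter.wait(timeout=1)  # the spawning thread unblocks here

    # The case logged above: an event arrives with no registered waiter.
    if events.pop_instance_event(uuid, 'network-vif-plugged-b3f3d9b5') is None:
        print('Received unexpected event for instance %s' % uuid)

The per-instance "-events" lock seen in the lockutils lines plays the role of the registry lock here, keeping prepare and pop atomic against each other.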
[ 835.186323] env[61852]: DEBUG nova.compute.manager [req-ebaa13aa-44a2-4e7e-a310-a9cf0a2acb1a req-ee242e31-ac65-43e2-8745-d779956b14db service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Received event network-changed-b3f3d9b5-9c27-4415-b02c-58c0b1133727 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 835.186561] env[61852]: DEBUG nova.compute.manager [req-ebaa13aa-44a2-4e7e-a310-a9cf0a2acb1a req-ee242e31-ac65-43e2-8745-d779956b14db service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Refreshing instance network info cache due to event network-changed-b3f3d9b5-9c27-4415-b02c-58c0b1133727. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 835.186795] env[61852]: DEBUG oslo_concurrency.lockutils [req-ebaa13aa-44a2-4e7e-a310-a9cf0a2acb1a req-ee242e31-ac65-43e2-8745-d779956b14db service nova] Acquiring lock "refresh_cache-d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 835.186938] env[61852]: DEBUG oslo_concurrency.lockutils [req-ebaa13aa-44a2-4e7e-a310-a9cf0a2acb1a req-ee242e31-ac65-43e2-8745-d779956b14db service nova] Acquired lock "refresh_cache-d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.187123] env[61852]: DEBUG nova.network.neutron [req-ebaa13aa-44a2-4e7e-a310-a9cf0a2acb1a req-ee242e31-ac65-43e2-8745-d779956b14db service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Refreshing network info cache for port b3f3d9b5-9c27-4415-b02c-58c0b1133727 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 835.211663] env[61852]: DEBUG nova.network.neutron [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Updating instance_info_cache with network_info: [{"id": "17a779c7-0b48-479d-88e1-f5dc1ec4eab9", "address": "fa:16:3e:58:b2:0e", "network": {"id": "66e1ee36-559a-4219-ab11-b6c5d9aeb20e", "bridge": "br-int", "label": "tempest-ServersTestJSON-206536995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4dbb543c66364861bf5f437c8c33a550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2c424c9-6446-4b2a-af8c-4d9c29117c39", "external-id": "nsx-vlan-transportzone-437", "segmentation_id": 437, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17a779c7-0b", "ovs_interfaceid": "17a779c7-0b48-479d-88e1-f5dc1ec4eab9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.398076] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc618fbb-6b63-4225-8ca9-4f8ccbb12059 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lock 
"883a0d5a-f775-4ffc-abf0-921d0ea6cc8c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 108.083s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.413630] env[61852]: DEBUG oslo_vmware.api [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Task: {'id': task-1292908, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.305757} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.413928] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 835.414133] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 835.414333] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 835.414518] env[61852]: INFO nova.compute.manager [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Took 1.18 seconds to destroy the instance on the hypervisor. [ 835.414898] env[61852]: DEBUG oslo.service.loopingcall [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 835.415130] env[61852]: DEBUG nova.compute.manager [-] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 835.415552] env[61852]: DEBUG nova.network.neutron [-] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 835.487022] env[61852]: DEBUG oslo_vmware.api [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1292909, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.554066] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.966s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.556765] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.202s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.558435] env[61852]: INFO nova.compute.claims [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 835.580498] env[61852]: INFO nova.scheduler.client.report [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Deleted allocations for instance b0d38886-aacb-4b7e-9530-c5891d9cee66 [ 835.618576] env[61852]: DEBUG nova.compute.manager [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 835.637325] env[61852]: DEBUG oslo_vmware.api [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1292910, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.648087] env[61852]: DEBUG oslo_vmware.api [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292911, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.651344] env[61852]: DEBUG nova.virt.hardware [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 835.651672] env[61852]: DEBUG nova.virt.hardware [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 835.651950] env[61852]: DEBUG nova.virt.hardware [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 835.652170] env[61852]: DEBUG nova.virt.hardware [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 835.652327] env[61852]: DEBUG nova.virt.hardware [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 835.652479] env[61852]: DEBUG nova.virt.hardware [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 835.652695] env[61852]: DEBUG nova.virt.hardware [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 835.652947] env[61852]: DEBUG nova.virt.hardware [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 835.653359] env[61852]: DEBUG nova.virt.hardware [None 
req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 835.653471] env[61852]: DEBUG nova.virt.hardware [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 835.654090] env[61852]: DEBUG nova.virt.hardware [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 835.654536] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b083c10-fb03-47a5-a767-c4f39d0f6696 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.664922] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adf1a066-1779-49ef-856c-e74c8edada0f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.715110] env[61852]: DEBUG oslo_concurrency.lockutils [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Releasing lock "refresh_cache-d93b8055-1eb2-4368-a051-289dc5a9d0ed" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.715110] env[61852]: DEBUG nova.compute.manager [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Instance network_info: |[{"id": "17a779c7-0b48-479d-88e1-f5dc1ec4eab9", "address": "fa:16:3e:58:b2:0e", "network": {"id": "66e1ee36-559a-4219-ab11-b6c5d9aeb20e", "bridge": "br-int", "label": "tempest-ServersTestJSON-206536995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4dbb543c66364861bf5f437c8c33a550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2c424c9-6446-4b2a-af8c-4d9c29117c39", "external-id": "nsx-vlan-transportzone-437", "segmentation_id": 437, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17a779c7-0b", "ovs_interfaceid": "17a779c7-0b48-479d-88e1-f5dc1ec4eab9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 835.715408] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 
tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:b2:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f2c424c9-6446-4b2a-af8c-4d9c29117c39', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '17a779c7-0b48-479d-88e1-f5dc1ec4eab9', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 835.723119] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Creating folder: Project (4dbb543c66364861bf5f437c8c33a550). Parent ref: group-v277280. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 835.723439] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ea990880-5829-4b51-8e24-2fbf6e01d583 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.731461] env[61852]: DEBUG nova.compute.manager [req-df82de94-ecd2-480c-98e4-bd107a98a930 req-95d23b36-8617-4cdb-8e54-7cb96e2a4088 service nova] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Received event network-vif-plugged-17a779c7-0b48-479d-88e1-f5dc1ec4eab9 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 835.731724] env[61852]: DEBUG oslo_concurrency.lockutils [req-df82de94-ecd2-480c-98e4-bd107a98a930 req-95d23b36-8617-4cdb-8e54-7cb96e2a4088 service nova] Acquiring lock "d93b8055-1eb2-4368-a051-289dc5a9d0ed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.731905] env[61852]: DEBUG oslo_concurrency.lockutils [req-df82de94-ecd2-480c-98e4-bd107a98a930 req-95d23b36-8617-4cdb-8e54-7cb96e2a4088 service nova] Lock "d93b8055-1eb2-4368-a051-289dc5a9d0ed-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.732099] env[61852]: DEBUG oslo_concurrency.lockutils [req-df82de94-ecd2-480c-98e4-bd107a98a930 req-95d23b36-8617-4cdb-8e54-7cb96e2a4088 service nova] Lock "d93b8055-1eb2-4368-a051-289dc5a9d0ed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.732266] env[61852]: DEBUG nova.compute.manager [req-df82de94-ecd2-480c-98e4-bd107a98a930 req-95d23b36-8617-4cdb-8e54-7cb96e2a4088 service nova] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] No waiting events found dispatching network-vif-plugged-17a779c7-0b48-479d-88e1-f5dc1ec4eab9 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 835.732492] env[61852]: WARNING nova.compute.manager [req-df82de94-ecd2-480c-98e4-bd107a98a930 req-95d23b36-8617-4cdb-8e54-7cb96e2a4088 service nova] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Received unexpected event network-vif-plugged-17a779c7-0b48-479d-88e1-f5dc1ec4eab9 for instance with vm_state building and task_state spawning.
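The Folder.CreateFolder calls above (with the 'Created folder' confirmations that follow) place each instance under a per-tenant 'Project (<project-id>)' folder holding an 'Instances' child. A rough sketch of that two-level, idempotent layout, using a plain dict tree in place of vCenter folder managed-object references such as group-v277280; the helper name and structure are illustrative only:

    def ensure_folder(tree, parent_path, name):
        """Create folder `name` under parent_path if missing;
        return the child's full path either way."""
        node = tree
        for part in parent_path:
            node = node.setdefault(part, {})
        node.setdefault(name, {})
        return parent_path + [name]

    folders = {}
    root = ['group-v277280']  # root VM folder ref seen in this log
    project = ensure_folder(folders, root,
                            'Project (4dbb543c66364861bf5f437c8c33a550)')
    instances = ensure_folder(folders, project, 'Instances')
    print('/'.join(instances))
    # Re-running is a no-op, mirroring how a pre-existing folder is
    # tolerated rather than treated as an error.
    assert ensure_folder(folders, project, 'Instances') == instances

Grouping by project id keeps concurrent tempest tenants from colliding in the vCenter inventory while letting the driver recreate the path cheaply on every spawn.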
[ 835.732651] env[61852]: DEBUG nova.compute.manager [req-df82de94-ecd2-480c-98e4-bd107a98a930 req-95d23b36-8617-4cdb-8e54-7cb96e2a4088 service nova] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Received event network-changed-17a779c7-0b48-479d-88e1-f5dc1ec4eab9 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 835.732799] env[61852]: DEBUG nova.compute.manager [req-df82de94-ecd2-480c-98e4-bd107a98a930 req-95d23b36-8617-4cdb-8e54-7cb96e2a4088 service nova] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Refreshing instance network info cache due to event network-changed-17a779c7-0b48-479d-88e1-f5dc1ec4eab9. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 835.732984] env[61852]: DEBUG oslo_concurrency.lockutils [req-df82de94-ecd2-480c-98e4-bd107a98a930 req-95d23b36-8617-4cdb-8e54-7cb96e2a4088 service nova] Acquiring lock "refresh_cache-d93b8055-1eb2-4368-a051-289dc5a9d0ed" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 835.733164] env[61852]: DEBUG oslo_concurrency.lockutils [req-df82de94-ecd2-480c-98e4-bd107a98a930 req-95d23b36-8617-4cdb-8e54-7cb96e2a4088 service nova] Acquired lock "refresh_cache-d93b8055-1eb2-4368-a051-289dc5a9d0ed" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.733323] env[61852]: DEBUG nova.network.neutron [req-df82de94-ecd2-480c-98e4-bd107a98a930 req-95d23b36-8617-4cdb-8e54-7cb96e2a4088 service nova] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Refreshing network info cache for port 17a779c7-0b48-479d-88e1-f5dc1ec4eab9 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 835.741377] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Created folder: Project (4dbb543c66364861bf5f437c8c33a550) in parent group-v277280. [ 835.741605] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Creating folder: Instances. Parent ref: group-v277349. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 835.742200] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0f891d9a-5612-4c3a-9328-c1a90d0debce {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.755697] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Created folder: Instances in parent group-v277349. [ 835.755969] env[61852]: DEBUG oslo.service.loopingcall [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 835.756182] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 835.756691] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1e91b4c2-5721-432c-b8ec-4f66acc29672 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.779518] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 835.779518] env[61852]: value = "task-1292914" [ 835.779518] env[61852]: _type = "Task" [ 835.779518] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.787832] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292914, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.901302] env[61852]: DEBUG nova.compute.manager [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 835.922640] env[61852]: DEBUG nova.network.neutron [req-ebaa13aa-44a2-4e7e-a310-a9cf0a2acb1a req-ee242e31-ac65-43e2-8745-d779956b14db service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Updated VIF entry in instance network info cache for port b3f3d9b5-9c27-4415-b02c-58c0b1133727. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 835.923159] env[61852]: DEBUG nova.network.neutron [req-ebaa13aa-44a2-4e7e-a310-a9cf0a2acb1a req-ee242e31-ac65-43e2-8745-d779956b14db service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Updating instance_info_cache with network_info: [{"id": "9e5204e6-6870-43d3-986f-9ca080104e14", "address": "fa:16:3e:9f:59:1b", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e5204e6-68", "ovs_interfaceid": "9e5204e6-6870-43d3-986f-9ca080104e14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b3f3d9b5-9c27-4415-b02c-58c0b1133727", "address": "fa:16:3e:ab:83:17", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": 
"tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3f3d9b5-9c", "ovs_interfaceid": "b3f3d9b5-9c27-4415-b02c-58c0b1133727", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.989410] env[61852]: DEBUG oslo_vmware.api [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1292909, 'name': ReconfigVM_Task, 'duration_secs': 0.995702} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.990010] env[61852]: DEBUG oslo_concurrency.lockutils [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Releasing lock "d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.990359] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Reconfigured VM to attach interface {{(pid=61852) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 836.097175] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e2547922-82ed-47f7-aa14-5087a926eba5 tempest-ServerAddressesTestJSON-188840798 tempest-ServerAddressesTestJSON-188840798-project-member] Lock "b0d38886-aacb-4b7e-9530-c5891d9cee66" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.734s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.138165] env[61852]: DEBUG oslo_vmware.api [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1292910, 'name': ReconfigVM_Task, 'duration_secs': 0.528978} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.141363] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Reconfigured VM instance instance-00000043 to attach disk [datastore2] 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9/00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 836.142096] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-434c4ad8-ce7c-4d6f-9951-182a380aae8a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.148485] env[61852]: DEBUG oslo_vmware.api [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292911, 'name': PowerOnVM_Task, 'duration_secs': 0.635754} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.149777] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 836.150078] env[61852]: INFO nova.compute.manager [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Took 9.24 seconds to spawn the instance on the hypervisor. [ 836.150349] env[61852]: DEBUG nova.compute.manager [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 836.150727] env[61852]: DEBUG oslo_vmware.api [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 836.150727] env[61852]: value = "task-1292915" [ 836.150727] env[61852]: _type = "Task" [ 836.150727] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.151540] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e9e9a51-736e-4d8e-b6e8-47c9b8b9c605 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.161832] env[61852]: DEBUG oslo_vmware.api [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1292915, 'name': Rename_Task} progress is 5%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.186348] env[61852]: DEBUG nova.network.neutron [-] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.290648] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292914, 'name': CreateVM_Task} progress is 99%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.426751] env[61852]: DEBUG oslo_concurrency.lockutils [req-ebaa13aa-44a2-4e7e-a310-a9cf0a2acb1a req-ee242e31-ac65-43e2-8745-d779956b14db service nova] Releasing lock "refresh_cache-d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 836.429548] env[61852]: DEBUG oslo_concurrency.lockutils [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.468489] env[61852]: DEBUG nova.network.neutron [req-df82de94-ecd2-480c-98e4-bd107a98a930 req-95d23b36-8617-4cdb-8e54-7cb96e2a4088 service nova] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Updated VIF entry in instance network info cache for port 17a779c7-0b48-479d-88e1-f5dc1ec4eab9. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 836.468874] env[61852]: DEBUG nova.network.neutron [req-df82de94-ecd2-480c-98e4-bd107a98a930 req-95d23b36-8617-4cdb-8e54-7cb96e2a4088 service nova] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Updating instance_info_cache with network_info: [{"id": "17a779c7-0b48-479d-88e1-f5dc1ec4eab9", "address": "fa:16:3e:58:b2:0e", "network": {"id": "66e1ee36-559a-4219-ab11-b6c5d9aeb20e", "bridge": "br-int", "label": "tempest-ServersTestJSON-206536995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4dbb543c66364861bf5f437c8c33a550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2c424c9-6446-4b2a-af8c-4d9c29117c39", "external-id": "nsx-vlan-transportzone-437", "segmentation_id": 437, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17a779c7-0b", "ovs_interfaceid": "17a779c7-0b48-479d-88e1-f5dc1ec4eab9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.495854] env[61852]: DEBUG nova.network.neutron [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Successfully updated port: ebf57cb3-0f32-48ff-a39a-bc6c32d02167 {{(pid=61852) _update_port 
/opt/stack/nova/nova/network/neutron.py:586}} [ 836.498845] env[61852]: DEBUG oslo_concurrency.lockutils [None req-79e9e8e0-7e3e-4112-957a-f1726b0b80d0 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "interface-d3922357-383f-4f7e-9c76-4eb688a092b9-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.718s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.502533] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquiring lock "89970cff-cb49-4803-81a5-1675b0ea4aaf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.502765] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lock "89970cff-cb49-4803-81a5-1675b0ea4aaf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.502989] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquiring lock "89970cff-cb49-4803-81a5-1675b0ea4aaf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.503188] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lock "89970cff-cb49-4803-81a5-1675b0ea4aaf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.503356] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lock "89970cff-cb49-4803-81a5-1675b0ea4aaf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.505286] env[61852]: INFO nova.compute.manager [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Terminating instance [ 836.507326] env[61852]: DEBUG nova.compute.manager [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 836.507512] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 836.508381] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd8dbc30-ada3-4b02-a803-a8988c846a31 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.516912] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 836.517161] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c14f8526-07b9-46d3-ba1d-371a8d91ab29 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.524512] env[61852]: DEBUG oslo_vmware.api [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for the task: (returnval){ [ 836.524512] env[61852]: value = "task-1292916" [ 836.524512] env[61852]: _type = "Task" [ 836.524512] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.535172] env[61852]: DEBUG oslo_vmware.api [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292916, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.671209] env[61852]: DEBUG oslo_vmware.api [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1292915, 'name': Rename_Task, 'duration_secs': 0.250566} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.673160] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 836.676155] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4395ff65-29ea-4ffc-ae68-2ca5527e27cd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.678651] env[61852]: INFO nova.compute.manager [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Took 36.71 seconds to build instance. 
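The recurring "Task: {'id': task-..., 'name': ...} progress is N%" entries above come from oslo_vmware's wait_for_task/_poll_task pair: after a vCenter method such as CreateVM_Task or PowerOnVM_Task is invoked, a looping call re-reads the task state until it reaches a terminal state. A minimal, self-contained Python sketch of that polling pattern follows; fetch_task_info and the info-dict shape are hypothetical stand-ins for the PropertyCollector round-trip, not the real oslo_vmware API:

    # Sketch of the loop behind the "_poll_task ... progress is N%" lines.
    import time

    POLL_INTERVAL = 0.5  # seconds between polls, akin to task_poll_interval

    def wait_for_task(task_id, fetch_task_info):
        """Poll a vCenter task until it reaches a terminal state."""
        while True:
            info = fetch_task_info(task_id)  # e.g. {'state': 'running', 'progress': 51}
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
            # 'queued' or 'running': report progress and keep polling,
            # mirroring the "progress is N%" DEBUG lines in the log.
            print(f"Task {task_id} progress is {info.get('progress', 0)}%")
            time.sleep(POLL_INTERVAL)

    # Demo with a canned task that completes on the third poll:
    states = iter([{'state': 'running', 'progress': 0},
                   {'state': 'running', 'progress': 99},
                   {'state': 'success', 'progress': 100, 'result': 'vm-123'}])
    print(wait_for_task('task-1292914', lambda _tid: next(states)))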
[ 836.686046] env[61852]: DEBUG oslo_vmware.api [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 836.686046] env[61852]: value = "task-1292917" [ 836.686046] env[61852]: _type = "Task" [ 836.686046] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.692552] env[61852]: INFO nova.compute.manager [-] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Took 1.28 seconds to deallocate network for instance. [ 836.702377] env[61852]: DEBUG oslo_vmware.api [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1292917, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.797875] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292914, 'name': CreateVM_Task, 'duration_secs': 0.639407} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.800896] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 836.802455] env[61852]: DEBUG oslo_concurrency.lockutils [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 836.802455] env[61852]: DEBUG oslo_concurrency.lockutils [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.802611] env[61852]: DEBUG oslo_concurrency.lockutils [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 836.802904] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab9ec926-3e3b-4005-83c9-721bc0472294 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.809079] env[61852]: DEBUG oslo_vmware.api [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 836.809079] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5265405e-515c-c76c-ea3a-2d57157ea0be" [ 836.809079] env[61852]: _type = "Task" [ 836.809079] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.820836] env[61852]: DEBUG oslo_vmware.api [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5265405e-515c-c76c-ea3a-2d57157ea0be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.888161] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a786fc68-dea0-464b-bd15-e70a43f1b77d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.897082] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-553f00e3-db5e-4b6b-b9b3-d055e191d940 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.931717] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1feec9be-c734-4f7c-8c01-a7f7ff3be15a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.941053] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb19334d-6ecd-4814-9424-7564dffb416d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.958520] env[61852]: DEBUG nova.compute.provider_tree [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 836.975629] env[61852]: DEBUG oslo_concurrency.lockutils [req-df82de94-ecd2-480c-98e4-bd107a98a930 req-95d23b36-8617-4cdb-8e54-7cb96e2a4088 service nova] Releasing lock "refresh_cache-d93b8055-1eb2-4368-a051-289dc5a9d0ed" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.001649] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquiring lock "refresh_cache-988c0a5c-b84d-44cf-9068-defd7132b0c9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.001778] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquired lock "refresh_cache-988c0a5c-b84d-44cf-9068-defd7132b0c9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.001932] env[61852]: DEBUG nova.network.neutron [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 837.035715] env[61852]: DEBUG oslo_vmware.api [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 
tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292916, 'name': PowerOffVM_Task, 'duration_secs': 0.257837} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.036023] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 837.036231] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 837.036510] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-17c97182-e855-48a5-a15a-c654dbc877b7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.122551] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 837.122853] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 837.123122] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Deleting the datastore file [datastore1] 89970cff-cb49-4803-81a5-1675b0ea4aaf {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 837.123485] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e1a5ae8c-4fdd-4aab-bb6c-5c30854db72b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.131621] env[61852]: DEBUG oslo_vmware.api [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for the task: (returnval){ [ 837.131621] env[61852]: value = "task-1292919" [ 837.131621] env[61852]: _type = "Task" [ 837.131621] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.143589] env[61852]: DEBUG oslo_vmware.api [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292919, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.181776] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2a31ca1d-f9be-4c0c-8e98-dc74827482e5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "b0f8f7dd-e559-43be-b541-c3da48a07d68" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 108.345s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.198137] env[61852]: DEBUG oslo_vmware.api [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1292917, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.200211] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.292486] env[61852]: DEBUG nova.compute.manager [req-597a5fa4-517b-4128-80f4-f0147599f970 req-5129d59d-4324-4fcb-95c7-6516c59dfaca service nova] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Received event network-vif-deleted-72d13320-e518-4f1a-98b0-cb48bcb2fe11 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 837.320647] env[61852]: DEBUG oslo_vmware.api [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5265405e-515c-c76c-ea3a-2d57157ea0be, 'name': SearchDatastore_Task, 'duration_secs': 0.01848} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.320968] env[61852]: DEBUG oslo_concurrency.lockutils [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.321877] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 837.321877] env[61852]: DEBUG oslo_concurrency.lockutils [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.321877] env[61852]: DEBUG oslo_concurrency.lockutils [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.321877] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 837.322246] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5ec9bf6e-4a3e-474f-8d42-3c4f34ba98f0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.342645] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 837.342645] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 837.343465] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-081948b4-f075-4bcc-8631-38942dbf3a5c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.351232] env[61852]: DEBUG oslo_vmware.api [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 837.351232] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529b112c-d8ad-f27c-3c03-007ee94247b8" [ 837.351232] env[61852]: _type = "Task" [ 837.351232] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.361304] env[61852]: DEBUG oslo_vmware.api [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529b112c-d8ad-f27c-3c03-007ee94247b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.462419] env[61852]: DEBUG nova.scheduler.client.report [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 837.553020] env[61852]: DEBUG nova.network.neutron [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 837.642779] env[61852]: DEBUG oslo_vmware.api [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292919, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.698498] env[61852]: DEBUG oslo_vmware.api [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1292917, 'name': PowerOnVM_Task, 'duration_secs': 0.706594} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.700038] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 837.700038] env[61852]: INFO nova.compute.manager [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Took 8.16 seconds to spawn the instance on the hypervisor. [ 837.700038] env[61852]: DEBUG nova.compute.manager [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 837.700655] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab6fc5f6-d53b-41b6-acf2-66aa967138e6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.871137] env[61852]: DEBUG oslo_vmware.api [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529b112c-d8ad-f27c-3c03-007ee94247b8, 'name': SearchDatastore_Task, 'duration_secs': 0.025344} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.873937] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00461ad9-328f-4d6b-b93f-463012696c71 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.880105] env[61852]: DEBUG oslo_vmware.api [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 837.880105] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]523c91a8-877c-b0be-9cbf-67fcad1e74ad" [ 837.880105] env[61852]: _type = "Task" [ 837.880105] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.892763] env[61852]: DEBUG nova.compute.manager [req-4055238e-1ba7-4390-8b69-aca7cc403705 req-19be0914-8282-4cf3-ab4a-0b16162067f3 service nova] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Received event network-vif-plugged-ebf57cb3-0f32-48ff-a39a-bc6c32d02167 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 837.893097] env[61852]: DEBUG oslo_concurrency.lockutils [req-4055238e-1ba7-4390-8b69-aca7cc403705 req-19be0914-8282-4cf3-ab4a-0b16162067f3 service nova] Acquiring lock "988c0a5c-b84d-44cf-9068-defd7132b0c9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.893411] env[61852]: DEBUG oslo_concurrency.lockutils [req-4055238e-1ba7-4390-8b69-aca7cc403705 req-19be0914-8282-4cf3-ab4a-0b16162067f3 service nova] Lock "988c0a5c-b84d-44cf-9068-defd7132b0c9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.893674] env[61852]: DEBUG oslo_concurrency.lockutils [req-4055238e-1ba7-4390-8b69-aca7cc403705 req-19be0914-8282-4cf3-ab4a-0b16162067f3 service nova] Lock "988c0a5c-b84d-44cf-9068-defd7132b0c9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.893933] env[61852]: DEBUG nova.compute.manager [req-4055238e-1ba7-4390-8b69-aca7cc403705 req-19be0914-8282-4cf3-ab4a-0b16162067f3 service nova] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] No waiting events found dispatching network-vif-plugged-ebf57cb3-0f32-48ff-a39a-bc6c32d02167 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 837.895293] env[61852]: WARNING nova.compute.manager [req-4055238e-1ba7-4390-8b69-aca7cc403705 req-19be0914-8282-4cf3-ab4a-0b16162067f3 service nova] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Received unexpected event network-vif-plugged-ebf57cb3-0f32-48ff-a39a-bc6c32d02167 for instance with vm_state building and task_state spawning. [ 837.895578] env[61852]: DEBUG nova.compute.manager [req-4055238e-1ba7-4390-8b69-aca7cc403705 req-19be0914-8282-4cf3-ab4a-0b16162067f3 service nova] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Received event network-changed-ebf57cb3-0f32-48ff-a39a-bc6c32d02167 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 837.895831] env[61852]: DEBUG nova.compute.manager [req-4055238e-1ba7-4390-8b69-aca7cc403705 req-19be0914-8282-4cf3-ab4a-0b16162067f3 service nova] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Refreshing instance network info cache due to event network-changed-ebf57cb3-0f32-48ff-a39a-bc6c32d02167. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 837.896107] env[61852]: DEBUG oslo_concurrency.lockutils [req-4055238e-1ba7-4390-8b69-aca7cc403705 req-19be0914-8282-4cf3-ab4a-0b16162067f3 service nova] Acquiring lock "refresh_cache-988c0a5c-b84d-44cf-9068-defd7132b0c9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.900817] env[61852]: DEBUG nova.network.neutron [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Updating instance_info_cache with network_info: [{"id": "ebf57cb3-0f32-48ff-a39a-bc6c32d02167", "address": "fa:16:3e:a9:0a:71", "network": {"id": "f308dd1a-c776-4e59-81d2-5cd82ce8c6ec", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1429981735-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "856d91d948e84ab69536db1faebf54ee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f", "external-id": "nsx-vlan-transportzone-904", "segmentation_id": 904, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapebf57cb3-0f", "ovs_interfaceid": "ebf57cb3-0f32-48ff-a39a-bc6c32d02167", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.907900] env[61852]: DEBUG oslo_vmware.api [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]523c91a8-877c-b0be-9cbf-67fcad1e74ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.972186] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.415s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.972722] env[61852]: DEBUG nova.compute.manager [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 837.975755] env[61852]: DEBUG oslo_concurrency.lockutils [None req-24c02fd9-3ce2-424c-b2ab-decf9a8e3beb tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.631s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.979018] env[61852]: DEBUG nova.objects.instance [None req-24c02fd9-3ce2-424c-b2ab-decf9a8e3beb tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lazy-loading 'resources' on Instance uuid c94066d5-2e5f-4059-bdc5-385d517f1d84 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 837.979018] env[61852]: INFO nova.compute.manager [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Rebuilding instance [ 838.032421] env[61852]: DEBUG nova.compute.manager [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 838.033787] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d15cac9d-8a21-4774-9bec-a3702ba25b2f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.142609] env[61852]: DEBUG oslo_vmware.api [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1292919, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.567288} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.143089] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 838.143089] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 838.143245] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 838.143397] env[61852]: INFO nova.compute.manager [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Took 1.64 seconds to destroy the instance on the hypervisor. 
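Every critical section above is bracketed by an Acquiring lock / acquired :: waited Xs / "released" :: held Ys triplet from oslo.concurrency's lockutils, keyed on names such as an instance UUID, refresh_cache-<uuid>, or compute_resources. Below is a plain-Python sketch of that serialization-and-timing pattern using only the standard library; the lock name and the refresh_network_cache body are placeholders, not Nova code:

    # Reproduces the logging shape of lockutils.synchronized; not the
    # oslo implementation itself.
    import functools
    import threading
    import time

    _locks: dict[str, threading.Lock] = {}

    def synchronized(name: str):
        """Serialize callers on a named in-process lock, logging wait/hold times."""
        lock = _locks.setdefault(name, threading.Lock())

        def decorator(fn):
            @functools.wraps(fn)
            def inner(*args, **kwargs):
                print(f'Acquiring lock "{name}" by "{fn.__qualname__}"')
                t0 = time.monotonic()
                lock.acquire()
                waited = time.monotonic() - t0
                print(f'Lock "{name}" acquired by "{fn.__qualname__}" :: waited {waited:.3f}s')
                held_from = time.monotonic()
                try:
                    return fn(*args, **kwargs)
                finally:
                    lock.release()
                    held = time.monotonic() - held_from
                    print(f'Lock "{name}" "released" by "{fn.__qualname__}" :: held {held:.3f}s')
            return inner
        return decorator

    @synchronized("refresh_cache-<instance-uuid>")  # placeholder lock name
    def refresh_network_cache():
        time.sleep(0.01)  # stand-in for the Neutron round-trip

The same module also backs some names with shared semaphores rather than plain locks, which is where the "Acquired external semaphore" lines above originate.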
[ 838.143687] env[61852]: DEBUG oslo.service.loopingcall [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 838.143896] env[61852]: DEBUG nova.compute.manager [-] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 838.143990] env[61852]: DEBUG nova.network.neutron [-] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 838.221772] env[61852]: INFO nova.compute.manager [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Took 33.75 seconds to build instance. [ 838.394671] env[61852]: DEBUG oslo_vmware.api [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]523c91a8-877c-b0be-9cbf-67fcad1e74ad, 'name': SearchDatastore_Task, 'duration_secs': 0.029501} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.394671] env[61852]: DEBUG oslo_concurrency.lockutils [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.394671] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] d93b8055-1eb2-4368-a051-289dc5a9d0ed/d93b8055-1eb2-4368-a051-289dc5a9d0ed.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 838.394671] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d9b8b380-b658-4892-9fc2-06be8c76cd46 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.401454] env[61852]: DEBUG oslo_vmware.api [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 838.401454] env[61852]: value = "task-1292920" [ 838.401454] env[61852]: _type = "Task" [ 838.401454] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.416527] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Releasing lock "refresh_cache-988c0a5c-b84d-44cf-9068-defd7132b0c9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.416527] env[61852]: DEBUG nova.compute.manager [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Instance network_info: |[{"id": "ebf57cb3-0f32-48ff-a39a-bc6c32d02167", "address": "fa:16:3e:a9:0a:71", "network": {"id": "f308dd1a-c776-4e59-81d2-5cd82ce8c6ec", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1429981735-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "856d91d948e84ab69536db1faebf54ee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f", "external-id": "nsx-vlan-transportzone-904", "segmentation_id": 904, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapebf57cb3-0f", "ovs_interfaceid": "ebf57cb3-0f32-48ff-a39a-bc6c32d02167", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 838.416527] env[61852]: DEBUG oslo_vmware.api [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1292920, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.416527] env[61852]: DEBUG oslo_concurrency.lockutils [req-4055238e-1ba7-4390-8b69-aca7cc403705 req-19be0914-8282-4cf3-ab4a-0b16162067f3 service nova] Acquired lock "refresh_cache-988c0a5c-b84d-44cf-9068-defd7132b0c9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.416527] env[61852]: DEBUG nova.network.neutron [req-4055238e-1ba7-4390-8b69-aca7cc403705 req-19be0914-8282-4cf3-ab4a-0b16162067f3 service nova] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Refreshing network info cache for port ebf57cb3-0f32-48ff-a39a-bc6c32d02167 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 838.420127] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:0a:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '98f447de-d71e-41ef-bc37-ed97b4a1f58f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ebf57cb3-0f32-48ff-a39a-bc6c32d02167', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 838.426132] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Creating folder: Project (856d91d948e84ab69536db1faebf54ee). Parent ref: group-v277280. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 838.428684] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a4dedbc3-679f-40a8-b89a-25734fd4677f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.441025] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Created folder: Project (856d91d948e84ab69536db1faebf54ee) in parent group-v277280. [ 838.441025] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Creating folder: Instances. Parent ref: group-v277352. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 838.441025] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-36714b51-3905-4429-b8af-0d9663e866fb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.451155] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Created folder: Instances in parent group-v277352. [ 838.452082] env[61852]: DEBUG oslo.service.loopingcall [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 838.452082] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 838.452304] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1e0740b5-4f90-453b-a1b7-11ba59a95f57 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.474256] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 838.474256] env[61852]: value = "task-1292923" [ 838.474256] env[61852]: _type = "Task" [ 838.474256] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.490023] env[61852]: DEBUG nova.compute.utils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 838.490648] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292923, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.491725] env[61852]: DEBUG nova.compute.manager [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 838.492070] env[61852]: DEBUG nova.network.neutron [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 838.543368] env[61852]: DEBUG nova.policy [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '52bf0aad002740e28da26a9e1d6b14da', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '856d91d948e84ab69536db1faebf54ee', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 838.554465] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 838.554902] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8c9d7c30-2235-4542-897b-f167e0c840b2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.566579] env[61852]: DEBUG 
oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 838.566579] env[61852]: value = "task-1292924" [ 838.566579] env[61852]: _type = "Task" [ 838.566579] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.576030] env[61852]: DEBUG oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292924, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.705849] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "interface-d3922357-383f-4f7e-9c76-4eb688a092b9-669836ae-c7e6-440f-b9bf-84b0d95a595e" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.706253] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "interface-d3922357-383f-4f7e-9c76-4eb688a092b9-669836ae-c7e6-440f-b9bf-84b0d95a595e" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.706640] env[61852]: DEBUG nova.objects.instance [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lazy-loading 'flavor' on Instance uuid d3922357-383f-4f7e-9c76-4eb688a092b9 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 838.723738] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d6d664db-e413-4243-be13-f5fc2721fdbd tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 108.539s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.856304] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-021c0613-fe86-423a-94e9-434058c6bb6d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.869746] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f2a716b-40bc-41fb-adf8-9ed87d888600 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.905308] env[61852]: DEBUG nova.network.neutron [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Successfully created port: 8f488560-af02-4742-8338-8d0855707346 {{(pid=61852) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 838.910679] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b50a9704-bbe3-4b9e-ae54-cccf7cbb1b8d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.925152] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de5327e2-7c1a-4980-9165-f37081ca9388 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.929976] env[61852]: DEBUG oslo_vmware.api [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1292920, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.932022] env[61852]: DEBUG nova.network.neutron [-] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.943604] env[61852]: DEBUG nova.compute.provider_tree [None req-24c02fd9-3ce2-424c-b2ab-decf9a8e3beb tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 838.987229] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292923, 'name': CreateVM_Task, 'duration_secs': 0.398153} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.987568] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 838.988125] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.988277] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.988608] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 838.988897] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8fcc414-7056-4abf-8ffe-86a044bbdbe8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.992369] env[61852]: DEBUG 
nova.compute.manager [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 838.996489] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Waiting for the task: (returnval){ [ 838.996489] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52ca7bbf-f766-1af9-0216-bd173ec477ed" [ 838.996489] env[61852]: _type = "Task" [ 838.996489] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.006259] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52ca7bbf-f766-1af9-0216-bd173ec477ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.074307] env[61852]: DEBUG oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292924, 'name': PowerOffVM_Task, 'duration_secs': 0.279265} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.074619] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 839.074842] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 839.075651] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d58409ea-29a3-4193-a644-acd5a0f8ed17 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.082461] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 839.082730] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-37c084a6-6552-4464-9ff6-75cc6d0c8caf {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.144405] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 
b0f8f7dd-e559-43be-b541-c3da48a07d68] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 839.144624] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Deleting contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 839.144813] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Deleting the datastore file [datastore2] b0f8f7dd-e559-43be-b541-c3da48a07d68 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 839.145108] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2b8093a1-d9e7-4671-9bb8-1e51752fd849 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.153584] env[61852]: DEBUG oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 839.153584] env[61852]: value = "task-1292926" [ 839.153584] env[61852]: _type = "Task" [ 839.153584] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.161715] env[61852]: DEBUG oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292926, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.175258] env[61852]: DEBUG nova.network.neutron [req-4055238e-1ba7-4390-8b69-aca7cc403705 req-19be0914-8282-4cf3-ab4a-0b16162067f3 service nova] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Updated VIF entry in instance network info cache for port ebf57cb3-0f32-48ff-a39a-bc6c32d02167. 
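The "Acquiring lock ... by ...do_attach_interface", "acquired ... waited 0.001s" and "released ... held 108.539s" entries above come from oslo.concurrency's lockutils, which times both how long a caller waited for a named lock and how long it held it. A minimal sketch of that pattern, assuming oslo.concurrency is installed; the lock name and the empty body are illustrative, not nova's real code:

```python
# Minimal sketch of the lockutils pattern behind the "Acquiring lock ...",
# "acquired ... waited N s" and "released ... held N s" entries above.
# Assumes oslo.concurrency is installed; the name and body are illustrative.
from oslo_concurrency import lockutils


@lockutils.synchronized("interface-d3922357-383f-4f7e-9c76-4eb688a092b9")
def do_attach_interface():
    # Runs with the named in-process lock held. lockutils logs the two
    # durations seen in the entries above: the wait to acquire the lock,
    # and the time it was held once acquired.
    pass


do_attach_interface()
```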
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 839.175654] env[61852]: DEBUG nova.network.neutron [req-4055238e-1ba7-4390-8b69-aca7cc403705 req-19be0914-8282-4cf3-ab4a-0b16162067f3 service nova] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Updating instance_info_cache with network_info: [{"id": "ebf57cb3-0f32-48ff-a39a-bc6c32d02167", "address": "fa:16:3e:a9:0a:71", "network": {"id": "f308dd1a-c776-4e59-81d2-5cd82ce8c6ec", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1429981735-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "856d91d948e84ab69536db1faebf54ee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f", "external-id": "nsx-vlan-transportzone-904", "segmentation_id": 904, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapebf57cb3-0f", "ovs_interfaceid": "ebf57cb3-0f32-48ff-a39a-bc6c32d02167", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.315144] env[61852]: DEBUG nova.objects.instance [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lazy-loading 'pci_requests' on Instance uuid d3922357-383f-4f7e-9c76-4eb688a092b9 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 839.364831] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 839.365195] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 839.365445] env[61852]: INFO nova.compute.manager [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Shelving [ 839.415509] env[61852]: DEBUG oslo_vmware.api [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1292920, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.579369} completed successfully. 
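That instance_info_cache entry is a list of VIF dicts. As a small plain-Python illustration, this pulls the fields operators usually want (port id, device name, fixed IPs, MTU) out of one such entry, trimmed to the fields shown in the log:

```python
import json

# A trimmed copy of the VIF entry logged above; only the fields used below
# are kept, with the same structure as the full cache entry.
vif_json = """
{"id": "ebf57cb3-0f32-48ff-a39a-bc6c32d02167",
 "devname": "tapebf57cb3-0f",
 "network": {"label": "tempest-MultipleCreateTestJSON-1429981735-network",
             "subnets": [{"cidr": "192.168.128.0/28",
                          "ips": [{"address": "192.168.128.13"}]}],
             "meta": {"mtu": 8950}}}
"""

vif = json.loads(vif_json)
ips = [ip["address"]
       for subnet in vif["network"]["subnets"]
       for ip in subnet["ips"]]
print(vif["id"], vif["devname"], ips, vif["network"]["meta"]["mtu"])
# ebf57cb3-0f32-48ff-a39a-bc6c32d02167 tapebf57cb3-0f ['192.168.128.13'] 8950
```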
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.415695] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] d93b8055-1eb2-4368-a051-289dc5a9d0ed/d93b8055-1eb2-4368-a051-289dc5a9d0ed.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 839.415914] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 839.416195] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-301d33ce-e526-448b-b1c2-7f569f8cf896 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.423117] env[61852]: DEBUG oslo_vmware.api [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 839.423117] env[61852]: value = "task-1292927" [ 839.423117] env[61852]: _type = "Task" [ 839.423117] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.431775] env[61852]: DEBUG oslo_vmware.api [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1292927, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.434392] env[61852]: INFO nova.compute.manager [-] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Took 1.29 seconds to deallocate network for instance. [ 839.447385] env[61852]: DEBUG nova.scheduler.client.report [None req-24c02fd9-3ce2-424c-b2ab-decf9a8e3beb tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 839.511547] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52ca7bbf-f766-1af9-0216-bd173ec477ed, 'name': SearchDatastore_Task, 'duration_secs': 0.011053} completed successfully. 
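The inventory figures above fix what the scheduler may claim on this node: placement's effective capacity per resource class is (total - reserved) * allocation_ratio, with min_unit/max_unit/step_size further constraining individual requests (ignored here). Reproducing that arithmetic for the exact logged values:

```python
# Effective schedulable capacity per resource class, computed from the
# inventory dict in the log entry above.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:g} schedulable units")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```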
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.511834] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.512083] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 839.512326] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.512476] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.512660] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 839.512928] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0ca082b1-f31e-45f1-9370-95ad9ae28236 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.520917] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 839.521184] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Folder [datastore2] devstack-image-cache_base created. 
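The lock choreography above is nova's image-cache guard: work on "[datastore2] devstack-image-cache_base/<image-id>" is serialized so that only one request checks for the cached disk and, on a miss, creates it. A pure-Python sketch of that check-under-lock pattern; the lock registry, local paths, and fetch hook are illustrative stand-ins, not nova's code:

```python
import os
import threading

_locks: dict[str, threading.Lock] = {}
_registry = threading.Lock()


def _lock_for(key: str) -> threading.Lock:
    # One lock per cache path, created on first use.
    with _registry:
        return _locks.setdefault(key, threading.Lock())


def ensure_cached(cache_dir: str, image_id: str, fetch) -> str:
    """Return the cached file's path, fetching it at most once per image."""
    path = os.path.join(cache_dir, image_id + ".vmdk")
    with _lock_for(path):                          # "Acquiring/Acquired lock"
        if not os.path.exists(path):               # the SearchDatastore check
            os.makedirs(cache_dir, exist_ok=True)  # the MakeDirectory step
            fetch(path)                            # copy in the base image
    return path


# Dummy fetch for demonstration: just creates an empty file.
ensure_cached("/tmp/devstack-image-cache_base",
              "90fd8f39-16b3-43e0-a682-0ec131005e31",
              lambda p: open(p, "wb").close())
```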
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 839.521862] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a9b52b7-1598-422f-9dd8-8351040e3f2c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.527158] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Waiting for the task: (returnval){ [ 839.527158] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52c1bc04-eb93-d13d-7982-757279174593" [ 839.527158] env[61852]: _type = "Task" [ 839.527158] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.535560] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52c1bc04-eb93-d13d-7982-757279174593, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.664668] env[61852]: DEBUG oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292926, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.214921} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.664894] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 839.665085] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Deleted contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 839.665269] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 839.678609] env[61852]: DEBUG oslo_concurrency.lockutils [req-4055238e-1ba7-4390-8b69-aca7cc403705 req-19be0914-8282-4cf3-ab4a-0b16162067f3 service nova] Releasing lock "refresh_cache-988c0a5c-b84d-44cf-9068-defd7132b0c9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.818970] env[61852]: DEBUG nova.objects.base [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=61852) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 839.818970] env[61852]: DEBUG nova.network.neutron [None 
req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 839.875751] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 839.876039] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-10036092-6fd5-47c1-bf72-428c40538bf1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.884093] env[61852]: DEBUG oslo_vmware.api [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 839.884093] env[61852]: value = "task-1292928" [ 839.884093] env[61852]: _type = "Task" [ 839.884093] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.892574] env[61852]: DEBUG oslo_vmware.api [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1292928, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.933561] env[61852]: DEBUG oslo_vmware.api [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1292927, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08358} completed successfully. 
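All the "Waiting for the task: (returnval){ ... }" / "progress is N%." pairs in this log are one pattern: submit a vSphere task, then poll it until it reports success or error (wait_for_task and _poll_task in oslo.vmware's api.py, per the logged paths). A toy version of that loop; FakeTask and every name here are stand-ins for the real task handle, not the library's API:

```python
import time


class FakeTask:
    """Stand-in for a vSphere task handle; succeeds after a few polls."""

    def __init__(self, name, polls_needed=3):
        self.name = name
        self.polls_needed = polls_needed
        self._polls = 0

    def poll(self):
        self._polls += 1
        progress = min(100, int(100 * self._polls / self.polls_needed))
        state = "success" if self._polls >= self.polls_needed else "running"
        return state, progress


def wait_for_task(task, interval=0.5):
    """Poll until the task reports success, logging progress like _poll_task."""
    while True:
        state, progress = task.poll()
        print(f"Task: {task.name} progress is {progress}%.")
        if state == "success":
            return
        if state == "error":
            raise RuntimeError(f"{task.name} failed")
        time.sleep(interval)


wait_for_task(FakeTask("CreateVM_Task"))
```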
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.933816] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 839.934643] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc2f8633-b09f-4517-8ce1-af8cb92267c1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.949128] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 839.959047] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] d93b8055-1eb2-4368-a051-289dc5a9d0ed/d93b8055-1eb2-4368-a051-289dc5a9d0ed.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 839.959047] env[61852]: DEBUG oslo_concurrency.lockutils [None req-24c02fd9-3ce2-424c-b2ab-decf9a8e3beb tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.983s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 839.961201] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2a0ce202-8a46-4e6c-af16-b7448a18f056 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.977186] env[61852]: DEBUG nova.policy [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0f04d129452d4eb79514c52a6972af0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e93a6965a6884292bc56b01f7d54a622', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 839.979644] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.010s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 839.980950] env[61852]: INFO 
nova.compute.claims [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 839.989277] env[61852]: DEBUG oslo_vmware.api [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 839.989277] env[61852]: value = "task-1292929" [ 839.989277] env[61852]: _type = "Task" [ 839.989277] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.998014] env[61852]: DEBUG oslo_vmware.api [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1292929, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.000739] env[61852]: DEBUG nova.compute.manager [req-281f8dd5-ddd3-4971-a5ad-adc45bd89d70 req-f1e07648-91ea-40e7-9b75-13f6a88a92bb service nova] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Received event network-vif-deleted-145feb94-c188-4d2a-a614-870d122d1174 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 840.006396] env[61852]: INFO nova.scheduler.client.report [None req-24c02fd9-3ce2-424c-b2ab-decf9a8e3beb tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Deleted allocations for instance c94066d5-2e5f-4059-bdc5-385d517f1d84 [ 840.008831] env[61852]: DEBUG nova.compute.manager [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Start spawning the instance on the hypervisor. 
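The "Extending root virtual disk to 1048576" steps nearby are the m1.nano flavor's root_gb=1 expressed in KiB, which appears to be the unit the extend task takes (an inference from the figures in this log, not a documented statement):

```python
root_gb = 1                        # m1.nano flavor, per the log
size_kib = root_gb * 1024 * 1024   # GiB -> KiB
print(size_kib)                    # 1048576, the logged target size
```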
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 840.035024] env[61852]: DEBUG nova.virt.hardware [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 840.035343] env[61852]: DEBUG nova.virt.hardware [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 840.035459] env[61852]: DEBUG nova.virt.hardware [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 840.035631] env[61852]: DEBUG nova.virt.hardware [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 840.035782] env[61852]: DEBUG nova.virt.hardware [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 840.035935] env[61852]: DEBUG nova.virt.hardware [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 840.036185] env[61852]: DEBUG nova.virt.hardware [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 840.036325] env[61852]: DEBUG nova.virt.hardware [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 840.036497] env[61852]: DEBUG nova.virt.hardware [None 
req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 840.036659] env[61852]: DEBUG nova.virt.hardware [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 840.036833] env[61852]: DEBUG nova.virt.hardware [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 840.037949] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a50a9e-5916-47ec-847e-46436c4fdec2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.047528] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52c1bc04-eb93-d13d-7982-757279174593, 'name': SearchDatastore_Task, 'duration_secs': 0.011831} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.050295] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7876ff25-aa62-4f3d-a24f-26381b17e7c0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.053476] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed347e1c-6b24-4426-ab94-7e60f55117a0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.061349] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Waiting for the task: (returnval){ [ 840.061349] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]520ee827-cd6f-7ed4-666a-43d34695ec73" [ 840.061349] env[61852]: _type = "Task" [ 840.061349] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.080180] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]520ee827-cd6f-7ed4-666a-43d34695ec73, 'name': SearchDatastore_Task, 'duration_secs': 0.014895} completed successfully. 
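The nova.virt.hardware walk above (flavor/image limits 0:0:0, "Build topologies for 1 vcpu(s) 1:1:1", exactly one possible topology) is a constrained enumeration of (sockets, cores, threads) triples. A plausible reconstruction under the assumption that a topology must multiply out to exactly the vCPU count while respecting the maxima; nova's real filter lives in _get_possible_cpu_topologies and may differ in details:

```python
import itertools


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """List (sockets, cores, threads) triples whose product is vcpus."""
    triples = []
    for s, c, t in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if s * c * t == vcpus:
            triples.append((s, c, t))
    return triples


print(possible_topologies(1))  # [(1, 1, 1)], matching the log
print(possible_topologies(4))  # e.g. (1, 4, 1), (2, 2, 1), (4, 1, 1), ...
```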
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.080469] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.080730] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] 988c0a5c-b84d-44cf-9068-defd7132b0c9/988c0a5c-b84d-44cf-9068-defd7132b0c9.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 840.081011] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-897efb66-ee3a-4aa7-a8fe-7d02daf9e94a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.088583] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Waiting for the task: (returnval){ [ 840.088583] env[61852]: value = "task-1292930" [ 840.088583] env[61852]: _type = "Task" [ 840.088583] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.097612] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292930, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.214532] env[61852]: DEBUG oslo_vmware.rw_handles [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c085e5-e330-17cd-640c-b86970392ca9/disk-0.vmdk. {{(pid=61852) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 840.215849] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a3debf7-f4ed-48ff-a63d-24a73d4b24d0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.225953] env[61852]: DEBUG oslo_vmware.rw_handles [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c085e5-e330-17cd-640c-b86970392ca9/disk-0.vmdk is in state: ready. 
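Datastore references throughout these entries follow one shape, "[<datastore>] <dir>/<file>"; the copy above goes from the shared image cache into a directory named after the instance UUID. A tiny helper for composing such paths (illustrative, not nova's ds_util API):

```python
def ds_path(datastore: str, *parts: str) -> str:
    """Compose a vSphere datastore path like '[datastore2] dir/file'."""
    return f"[{datastore}] " + "/".join(parts)


uuid = "988c0a5c-b84d-44cf-9068-defd7132b0c9"
image = "90fd8f39-16b3-43e0-a682-0ec131005e31"
src = ds_path("datastore2", "devstack-image-cache_base", f"{image}.vmdk")
dst = ds_path("datastore2", uuid, f"{uuid}.vmdk")
print(src, "->", dst)
```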
{{(pid=61852) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 840.226250] env[61852]: ERROR oslo_vmware.rw_handles [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c085e5-e330-17cd-640c-b86970392ca9/disk-0.vmdk due to incomplete transfer. [ 840.226482] env[61852]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-331e3d7b-4590-4dd9-9194-74d7452b9de2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.233864] env[61852]: DEBUG oslo_vmware.rw_handles [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c085e5-e330-17cd-640c-b86970392ca9/disk-0.vmdk. {{(pid=61852) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 840.234218] env[61852]: DEBUG nova.virt.vmwareapi.images [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Uploaded image bec16c2f-506b-41d5-a8e9-28662d5a12ee to the Glance image server {{(pid=61852) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 840.236477] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Destroying the VM {{(pid=61852) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 840.236747] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-89fe869a-5dc8-49a1-ad83-26f7980835a5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.243069] env[61852]: DEBUG oslo_vmware.api [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){ [ 840.243069] env[61852]: value = "task-1292931" [ 840.243069] env[61852]: _type = "Task" [ 840.243069] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.251237] env[61852]: DEBUG oslo_vmware.api [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292931, 'name': Destroy_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.394591] env[61852]: DEBUG oslo_vmware.api [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1292928, 'name': PowerOffVM_Task, 'duration_secs': 0.272717} completed successfully. 
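A plausible reconstruction of the close-time decision those rw_handles lines show: when the read handle closes, a ready lease is completed if the whole advertised transfer was read, otherwise aborted ("due to incomplete transfer"). The names and the byte check here are illustrative, not oslo.vmware's actual code:

```python
def close_read_lease(lease_state: str, bytes_expected: int,
                     bytes_read: int) -> str:
    """Decide how to release an NFC lease when the reader closes."""
    if lease_state != "ready":
        return "noop"
    if bytes_read < bytes_expected:
        return "HttpNfcLeaseAbort"      # the ERROR path logged above
    return "HttpNfcLeaseComplete"


print(close_read_lease("ready", 21318656, 16384))     # HttpNfcLeaseAbort
print(close_read_lease("ready", 21318656, 21318656))  # HttpNfcLeaseComplete
```

Note the upload itself succeeded (the image reached Glance two entries later), so this ERROR is routine for stream-optimized reads that finish before the lease's full advertised size.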
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.394961] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 840.395802] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b2ee44b-abba-497e-b4cb-7b334fbbd4ae {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.415569] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c36aa9-9c53-4636-9c1b-8ea98819c664 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.501618] env[61852]: DEBUG oslo_vmware.api [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1292929, 'name': ReconfigVM_Task, 'duration_secs': 0.375515} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.502188] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Reconfigured VM instance instance-00000044 to attach disk [datastore1] d93b8055-1eb2-4368-a051-289dc5a9d0ed/d93b8055-1eb2-4368-a051-289dc5a9d0ed.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 840.502959] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2eae6a4d-37af-464f-b576-df0db2012c21 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.511958] env[61852]: DEBUG oslo_vmware.api [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 840.511958] env[61852]: value = "task-1292932" [ 840.511958] env[61852]: _type = "Task" [ 840.511958] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.520587] env[61852]: DEBUG oslo_concurrency.lockutils [None req-24c02fd9-3ce2-424c-b2ab-decf9a8e3beb tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "c94066d5-2e5f-4059-bdc5-385d517f1d84" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.784s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.526730] env[61852]: DEBUG oslo_vmware.api [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1292932, 'name': Rename_Task} progress is 10%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.602695] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292930, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.696755] env[61852]: DEBUG nova.virt.hardware [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 840.697157] env[61852]: DEBUG nova.virt.hardware [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 840.697341] env[61852]: DEBUG nova.virt.hardware [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 840.697565] env[61852]: DEBUG nova.virt.hardware [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 840.697721] env[61852]: DEBUG nova.virt.hardware [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 840.697934] env[61852]: DEBUG nova.virt.hardware [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 840.698206] env[61852]: DEBUG nova.virt.hardware [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 840.698378] env[61852]: DEBUG nova.virt.hardware [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 840.698550] env[61852]: DEBUG nova.virt.hardware [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 840.698741] env[61852]: DEBUG nova.virt.hardware [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 840.698910] env[61852]: DEBUG nova.virt.hardware [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 840.701263] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d980e9c7-897c-4642-b7be-e5d3931760ee {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.709693] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea6df903-5d11-4b91-a78c-66f9043544b8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.724643] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:51:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51ae336c-12cf-406a-b1ca-54e9ce553b3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd6bff739-5602-402b-8bb2-eb9bb4ab0bd7', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 840.732259] env[61852]: DEBUG oslo.service.loopingcall [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
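The "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return." line comes from oslo.service's loopingcall module (per the logged path), which wraps create_vm so transient faults can be retried. A toy decorator showing only the shape of that wrapper; the message, retry policy, and names are illustrative:

```python
import functools
import time


def retry(exceptions, max_retry_count=2, inc_sleep_time=1):
    """Toy retry-on-exception wrapper, logging like the entry above."""
    def decorator(f):
        @functools.wraps(f)
        def wrapper(*args, **kwargs):
            print(f"Waiting for function {f.__module__}.{f.__name__} "
                  "to return.")
            for attempt in range(max_retry_count + 1):
                try:
                    return f(*args, **kwargs)
                except exceptions:
                    if attempt == max_retry_count:
                        raise
                    time.sleep(inc_sleep_time * (attempt + 1))
        return wrapper
    return decorator


@retry((TimeoutError,))
def create_vm():
    return "vm-ref"


print(create_vm())
```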
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 840.732819] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 840.734279] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-473b50ec-bf28-4763-947f-a404663af669 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.756432] env[61852]: DEBUG oslo_vmware.api [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292931, 'name': Destroy_Task, 'duration_secs': 0.490492} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.757884] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Destroyed the VM [ 840.758399] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Deleting Snapshot of the VM instance {{(pid=61852) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 840.758637] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 840.758637] env[61852]: value = "task-1292933" [ 840.758637] env[61852]: _type = "Task" [ 840.758637] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.758876] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b3b9182c-aa17-46fe-92ed-44b1770970b4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.772092] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292933, 'name': CreateVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.772815] env[61852]: DEBUG oslo_vmware.api [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){ [ 840.772815] env[61852]: value = "task-1292934" [ 840.772815] env[61852]: _type = "Task" [ 840.772815] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.777559] env[61852]: DEBUG nova.network.neutron [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Successfully updated port: 8f488560-af02-4742-8338-8d0855707346 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 840.781902] env[61852]: DEBUG oslo_vmware.api [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292934, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.929086] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Creating Snapshot of the VM instance {{(pid=61852) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 840.929362] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-eba08fb6-a029-46bd-b51c-7d91063d9f10 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.937099] env[61852]: DEBUG oslo_vmware.api [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 840.937099] env[61852]: value = "task-1292935" [ 840.937099] env[61852]: _type = "Task" [ 840.937099] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.945333] env[61852]: DEBUG oslo_vmware.api [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1292935, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.027761] env[61852]: DEBUG oslo_vmware.api [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1292932, 'name': Rename_Task, 'duration_secs': 0.198182} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.028318] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 841.031942] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-051bd89d-7485-4107-b7d0-e81b96d73b34 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.040912] env[61852]: DEBUG oslo_vmware.api [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 841.040912] env[61852]: value = "task-1292936" [ 841.040912] env[61852]: _type = "Task" [ 841.040912] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.056973] env[61852]: DEBUG oslo_vmware.api [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1292936, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.103941] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292930, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.554481} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.108334] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] 988c0a5c-b84d-44cf-9068-defd7132b0c9/988c0a5c-b84d-44cf-9068-defd7132b0c9.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 841.108683] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 841.109818] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-813e6dbc-48db-4333-bb15-3c37ea7eab0e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.117476] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Waiting for the task: (returnval){ [ 841.117476] env[61852]: value = "task-1292937" [ 841.117476] env[61852]: _type = "Task" [ 841.117476] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.134245] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292937, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.272114] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292933, 'name': CreateVM_Task} progress is 99%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.287188] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquiring lock "refresh_cache-f48b40ab-23f2-4071-8168-e7e2411ad64d" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.287424] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquired lock "refresh_cache-f48b40ab-23f2-4071-8168-e7e2411ad64d" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.287643] env[61852]: DEBUG nova.network.neutron [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 841.289300] env[61852]: DEBUG oslo_vmware.api [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292934, 'name': RemoveSnapshot_Task} progress is 98%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.291893] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b1a1193-d61c-4e76-b42c-50f28ca67656 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.303561] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04e156ba-e7ea-4261-81ea-773bf03ee9ee {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.344593] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5840e21c-9b57-4b21-b5f7-f4780a3c541c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.352634] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb6525d2-61fb-4ff7-8d5b-d6d9bbb82af7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.367801] env[61852]: DEBUG nova.compute.provider_tree [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 841.447067] env[61852]: DEBUG oslo_vmware.api [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1292935, 'name': CreateSnapshot_Task} progress is 100%. 
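'Inventory has not changed in ProviderTree for provider: ...' reflects a cheap guard in the resource tracker: the freshly gathered inventory is compared against the cached copy and the Placement update is skipped when they are equal (the scheduler report client record further down prints the full VCPU/MEMORY_MB/DISK_GB inventory being compared). A toy version of that comparison, with the dict shape taken from the log (not Nova's actual code):

    cached = {
        "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                 "step_size": 1, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                      "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
    }

    def update_inventory(cached, new):
        # Skip the Placement round-trip when nothing changed, as in
        # "Inventory has not changed in ProviderTree for provider: ...".
        if cached == new:
            print("Inventory has not changed; skipping update")
            return False
        cached.clear()
        cached.update(new)
        return True

    update_inventory(cached, dict(cached))  # prints the skip message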
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.499036] env[61852]: DEBUG nova.network.neutron [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Successfully updated port: 669836ae-c7e6-440f-b9bf-84b0d95a595e {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 841.509854] env[61852]: DEBUG nova.compute.manager [req-aa11780b-e5e9-4001-9888-9b9e70300a2d req-35a7aa02-c1d8-45e8-b276-c08ec4ba4e43 service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Received event network-vif-plugged-669836ae-c7e6-440f-b9bf-84b0d95a595e {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 841.510105] env[61852]: DEBUG oslo_concurrency.lockutils [req-aa11780b-e5e9-4001-9888-9b9e70300a2d req-35a7aa02-c1d8-45e8-b276-c08ec4ba4e43 service nova] Acquiring lock "d3922357-383f-4f7e-9c76-4eb688a092b9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.510315] env[61852]: DEBUG oslo_concurrency.lockutils [req-aa11780b-e5e9-4001-9888-9b9e70300a2d req-35a7aa02-c1d8-45e8-b276-c08ec4ba4e43 service nova] Lock "d3922357-383f-4f7e-9c76-4eb688a092b9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.510483] env[61852]: DEBUG oslo_concurrency.lockutils [req-aa11780b-e5e9-4001-9888-9b9e70300a2d req-35a7aa02-c1d8-45e8-b276-c08ec4ba4e43 service nova] Lock "d3922357-383f-4f7e-9c76-4eb688a092b9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.510644] env[61852]: DEBUG nova.compute.manager [req-aa11780b-e5e9-4001-9888-9b9e70300a2d req-35a7aa02-c1d8-45e8-b276-c08ec4ba4e43 service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] No waiting events found dispatching network-vif-plugged-669836ae-c7e6-440f-b9bf-84b0d95a595e {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 841.510855] env[61852]: WARNING nova.compute.manager [req-aa11780b-e5e9-4001-9888-9b9e70300a2d req-35a7aa02-c1d8-45e8-b276-c08ec4ba4e43 service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Received unexpected event network-vif-plugged-669836ae-c7e6-440f-b9bf-84b0d95a595e for instance with vm_state active and task_state None. [ 841.551455] env[61852]: DEBUG oslo_vmware.api [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1292936, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.627677] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292937, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.142985} completed successfully. 
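The Acquiring/acquired/released triples around the '<uuid>-events' lock come from oslo.concurrency's lockutils wrapper, which reports how long a caller waited for and then held a named lock; pop_instance_event takes it briefly to look for a waiting event, finds none, and the compute manager then logs the 'Received unexpected event' warning seen above. A minimal sketch using the real lockutils.lock context manager (the timing printout mimics, but does not exactly reproduce, the library's log format):

    import time
    from oslo_concurrency import lockutils

    def pop_instance_event(instance_uuid):
        name = f"{instance_uuid}-events"
        t0 = time.monotonic()
        with lockutils.lock(name):  # in-process lock keyed by name
            waited = time.monotonic() - t0
            print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
            t1 = time.monotonic()
            # ... look up and pop any waiting event for this instance ...
            held = time.monotonic() - t1
        print(f'Lock "{name}" "released" :: held {held:.3f}s')

    pop_instance_event("d3922357-383f-4f7e-9c76-4eb688a092b9")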
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.627953] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 841.628727] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cb8f312-89fb-4eed-bd85-97b0cd8287dc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.651126] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] 988c0a5c-b84d-44cf-9068-defd7132b0c9/988c0a5c-b84d-44cf-9068-defd7132b0c9.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 841.651756] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-75bfdf0c-56db-41e3-ab9b-ff681d1a33b7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.671090] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Waiting for the task: (returnval){ [ 841.671090] env[61852]: value = "task-1292938" [ 841.671090] env[61852]: _type = "Task" [ 841.671090] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.678697] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292938, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.771737] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292933, 'name': CreateVM_Task, 'duration_secs': 0.524564} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.772222] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 841.772736] env[61852]: DEBUG oslo_concurrency.lockutils [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.772914] env[61852]: DEBUG oslo_concurrency.lockutils [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.773269] env[61852]: DEBUG oslo_concurrency.lockutils [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 841.773573] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16c9b99a-6b5d-429d-9182-eaca23643956 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.781288] env[61852]: DEBUG oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 841.781288] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52e5110b-91e6-bd3b-74ae-9a69c3da8d96" [ 841.781288] env[61852]: _type = "Task" [ 841.781288] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.787613] env[61852]: DEBUG oslo_vmware.api [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292934, 'name': RemoveSnapshot_Task} progress is 98%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.795065] env[61852]: DEBUG oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52e5110b-91e6-bd3b-74ae-9a69c3da8d96, 'name': SearchDatastore_Task, 'duration_secs': 0.012807} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.796086] env[61852]: DEBUG oslo_concurrency.lockutils [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.796086] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 841.796086] env[61852]: DEBUG oslo_concurrency.lockutils [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.796086] env[61852]: DEBUG oslo_concurrency.lockutils [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.796830] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 841.796830] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0563cc02-b528-4597-a988-b2b284131943 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.804240] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 841.804437] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 841.805151] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ba206f2-b39e-410d-bd2a-af347dff1a3e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.809987] env[61852]: DEBUG oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 841.809987] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52c9fa7c-0e2c-eaa1-671a-86df9ab18e62" [ 841.809987] env[61852]: _type = "Task" [ 841.809987] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.819924] env[61852]: DEBUG oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52c9fa7c-0e2c-eaa1-671a-86df9ab18e62, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.826320] env[61852]: DEBUG nova.network.neutron [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 841.871995] env[61852]: DEBUG nova.scheduler.client.report [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 841.951287] env[61852]: DEBUG oslo_vmware.api [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1292935, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.984895] env[61852]: DEBUG nova.network.neutron [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Updating instance_info_cache with network_info: [{"id": "8f488560-af02-4742-8338-8d0855707346", "address": "fa:16:3e:2c:bd:b1", "network": {"id": "f308dd1a-c776-4e59-81d2-5cd82ce8c6ec", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1429981735-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "856d91d948e84ab69536db1faebf54ee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f", "external-id": "nsx-vlan-transportzone-904", "segmentation_id": 904, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f488560-af", "ovs_interfaceid": "8f488560-af02-4742-8338-8d0855707346", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.002465] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "refresh_cache-d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.002734] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquired lock "refresh_cache-d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.002968] env[61852]: DEBUG nova.network.neutron [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 842.051514] env[61852]: DEBUG oslo_vmware.api [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1292936, 'name': PowerOnVM_Task, 'duration_secs': 0.845679} completed successfully. 
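Each update_instance_cache_with_nw_info record dumps the instance's network_info as a list of VIFs, where every entry carries the port UUID, MAC address, subnets with fixed and floating IPs, and the OVS/NSX binding details. A short sketch that extracts the fixed IPs from such a structure (the literal below is trimmed from the f48b40ab-... entry above):

    network_info = [{
        "id": "8f488560-af02-4742-8338-8d0855707346",
        "address": "fa:16:3e:2c:bd:b1",
        "network": {"subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.9", "type": "fixed",
                     "floating_ips": []}],
        }]},
        "devname": "tap8f488560-af",
    }]

    for vif in network_info:
        fixed = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
        print(vif["devname"], vif["address"], fixed)
    # tap8f488560-af fa:16:3e:2c:bd:b1 ['192.168.128.9']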
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.051843] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 842.052172] env[61852]: INFO nova.compute.manager [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Took 8.91 seconds to spawn the instance on the hypervisor. [ 842.052426] env[61852]: DEBUG nova.compute.manager [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 842.053257] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e77c6091-adc8-450d-b10f-e996ef91c9db {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.096876] env[61852]: DEBUG nova.compute.manager [req-e6c9cbde-8359-4494-a910-c9f096511e3b req-8aa015c9-5e44-43a1-b043-378c8cecdc17 service nova] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Received event network-vif-plugged-8f488560-af02-4742-8338-8d0855707346 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 842.097101] env[61852]: DEBUG oslo_concurrency.lockutils [req-e6c9cbde-8359-4494-a910-c9f096511e3b req-8aa015c9-5e44-43a1-b043-378c8cecdc17 service nova] Acquiring lock "f48b40ab-23f2-4071-8168-e7e2411ad64d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.097326] env[61852]: DEBUG oslo_concurrency.lockutils [req-e6c9cbde-8359-4494-a910-c9f096511e3b req-8aa015c9-5e44-43a1-b043-378c8cecdc17 service nova] Lock "f48b40ab-23f2-4071-8168-e7e2411ad64d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.097502] env[61852]: DEBUG oslo_concurrency.lockutils [req-e6c9cbde-8359-4494-a910-c9f096511e3b req-8aa015c9-5e44-43a1-b043-378c8cecdc17 service nova] Lock "f48b40ab-23f2-4071-8168-e7e2411ad64d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.097680] env[61852]: DEBUG nova.compute.manager [req-e6c9cbde-8359-4494-a910-c9f096511e3b req-8aa015c9-5e44-43a1-b043-378c8cecdc17 service nova] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] No waiting events found dispatching network-vif-plugged-8f488560-af02-4742-8338-8d0855707346 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 842.097843] env[61852]: WARNING nova.compute.manager [req-e6c9cbde-8359-4494-a910-c9f096511e3b req-8aa015c9-5e44-43a1-b043-378c8cecdc17 service nova] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Received unexpected event 
network-vif-plugged-8f488560-af02-4742-8338-8d0855707346 for instance with vm_state building and task_state spawning. [ 842.098011] env[61852]: DEBUG nova.compute.manager [req-e6c9cbde-8359-4494-a910-c9f096511e3b req-8aa015c9-5e44-43a1-b043-378c8cecdc17 service nova] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Received event network-changed-8f488560-af02-4742-8338-8d0855707346 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 842.098173] env[61852]: DEBUG nova.compute.manager [req-e6c9cbde-8359-4494-a910-c9f096511e3b req-8aa015c9-5e44-43a1-b043-378c8cecdc17 service nova] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Refreshing instance network info cache due to event network-changed-8f488560-af02-4742-8338-8d0855707346. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 842.098345] env[61852]: DEBUG oslo_concurrency.lockutils [req-e6c9cbde-8359-4494-a910-c9f096511e3b req-8aa015c9-5e44-43a1-b043-378c8cecdc17 service nova] Acquiring lock "refresh_cache-f48b40ab-23f2-4071-8168-e7e2411ad64d" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.182955] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292938, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.286500] env[61852]: DEBUG oslo_vmware.api [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292934, 'name': RemoveSnapshot_Task, 'duration_secs': 1.086193} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.286791] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Deleted Snapshot of the VM instance {{(pid=61852) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 842.287051] env[61852]: INFO nova.compute.manager [None req-f9e79e42-b1d9-4293-85c3-4431512b8cb2 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Took 16.58 seconds to snapshot the instance on the hypervisor. [ 842.320477] env[61852]: DEBUG oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52c9fa7c-0e2c-eaa1-671a-86df9ab18e62, 'name': SearchDatastore_Task, 'duration_secs': 0.012303} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.322070] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89498354-0ea2-4591-bf53-72a62c09ef89 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.328527] env[61852]: DEBUG oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 842.328527] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52068774-64af-82e2-bf08-2a629aa95719" [ 842.328527] env[61852]: _type = "Task" [ 842.328527] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.336216] env[61852]: DEBUG oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52068774-64af-82e2-bf08-2a629aa95719, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.378471] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.399s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.379016] env[61852]: DEBUG nova.compute.manager [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 842.382359] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.366s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.383867] env[61852]: INFO nova.compute.claims [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 842.448114] env[61852]: DEBUG oslo_vmware.api [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1292935, 'name': CreateSnapshot_Task, 'duration_secs': 1.273662} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.448373] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Created Snapshot of the VM instance {{(pid=61852) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 842.449145] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d522e500-485e-45f4-913f-899048602c22 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.487503] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Releasing lock "refresh_cache-f48b40ab-23f2-4071-8168-e7e2411ad64d" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.487783] env[61852]: DEBUG nova.compute.manager [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Instance network_info: |[{"id": "8f488560-af02-4742-8338-8d0855707346", "address": "fa:16:3e:2c:bd:b1", "network": {"id": "f308dd1a-c776-4e59-81d2-5cd82ce8c6ec", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1429981735-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "856d91d948e84ab69536db1faebf54ee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f", "external-id": "nsx-vlan-transportzone-904", "segmentation_id": 904, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f488560-af", "ovs_interfaceid": "8f488560-af02-4742-8338-8d0855707346", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 842.488106] env[61852]: DEBUG oslo_concurrency.lockutils [req-e6c9cbde-8359-4494-a910-c9f096511e3b req-8aa015c9-5e44-43a1-b043-378c8cecdc17 service nova] Acquired lock "refresh_cache-f48b40ab-23f2-4071-8168-e7e2411ad64d" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.488295] env[61852]: DEBUG nova.network.neutron [req-e6c9cbde-8359-4494-a910-c9f096511e3b req-8aa015c9-5e44-43a1-b043-378c8cecdc17 service nova] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Refreshing network info cache for port 8f488560-af02-4742-8338-8d0855707346 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 842.489456] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] 
[instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2c:bd:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '98f447de-d71e-41ef-bc37-ed97b4a1f58f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8f488560-af02-4742-8338-8d0855707346', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 842.496905] env[61852]: DEBUG oslo.service.loopingcall [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 842.499587] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 842.500063] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-48e68922-0a8c-4ec1-b99c-664614c11cb1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.523172] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 842.523172] env[61852]: value = "task-1292939" [ 842.523172] env[61852]: _type = "Task" [ 842.523172] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.531145] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292939, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.561882] env[61852]: WARNING nova.network.neutron [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] d984a6fb-5f5f-4678-bc8a-3723c26f290a already exists in list: networks containing: ['d984a6fb-5f5f-4678-bc8a-3723c26f290a']. ignoring it [ 842.562113] env[61852]: WARNING nova.network.neutron [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] d984a6fb-5f5f-4678-bc8a-3723c26f290a already exists in list: networks containing: ['d984a6fb-5f5f-4678-bc8a-3723c26f290a']. ignoring it [ 842.570189] env[61852]: INFO nova.compute.manager [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Took 36.68 seconds to build instance. [ 842.681478] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292938, 'name': ReconfigVM_Task, 'duration_secs': 0.679903} completed successfully. 
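Before issuing Folder.CreateVM_Task, build_virtual_machine flattens each Neutron port into the VIF-info dict seen in the 'Instance VIF info [...]' records: bridge name, MAC address, an opaque-network reference to the NSX logical switch, the port UUID as iface_id, and the vmxnet3 NIC model. Reproducing that shape (values copied from the log; the helper name and its input dict are ours, not Nova's):

    def make_vif_info(port):
        # Shape matches the "Instance VIF info [...]" records above.
        return {
            "network_name": "br-int",
            "mac_address": port["mac_address"],
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": port["nsx_switch_id"],
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": port["id"],
            "vif_model": "vmxnet3",
        }

    print(make_vif_info({
        "id": "8f488560-af02-4742-8338-8d0855707346",
        "mac_address": "fa:16:3e:2c:bd:b1",
        "nsx_switch_id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f",
    }))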
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.681799] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Reconfigured VM instance instance-00000045 to attach disk [datastore2] 988c0a5c-b84d-44cf-9068-defd7132b0c9/988c0a5c-b84d-44cf-9068-defd7132b0c9.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 842.682433] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-06cbd7d2-53d0-4a3d-a7af-97d75e98d95d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.689224] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Waiting for the task: (returnval){ [ 842.689224] env[61852]: value = "task-1292940" [ 842.689224] env[61852]: _type = "Task" [ 842.689224] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.700897] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292940, 'name': Rename_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.732190] env[61852]: DEBUG nova.network.neutron [req-e6c9cbde-8359-4494-a910-c9f096511e3b req-8aa015c9-5e44-43a1-b043-378c8cecdc17 service nova] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Updated VIF entry in instance network info cache for port 8f488560-af02-4742-8338-8d0855707346. 
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 842.732534] env[61852]: DEBUG nova.network.neutron [req-e6c9cbde-8359-4494-a910-c9f096511e3b req-8aa015c9-5e44-43a1-b043-378c8cecdc17 service nova] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Updating instance_info_cache with network_info: [{"id": "8f488560-af02-4742-8338-8d0855707346", "address": "fa:16:3e:2c:bd:b1", "network": {"id": "f308dd1a-c776-4e59-81d2-5cd82ce8c6ec", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1429981735-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "856d91d948e84ab69536db1faebf54ee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f", "external-id": "nsx-vlan-transportzone-904", "segmentation_id": 904, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f488560-af", "ovs_interfaceid": "8f488560-af02-4742-8338-8d0855707346", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.839719] env[61852]: DEBUG oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52068774-64af-82e2-bf08-2a629aa95719, 'name': SearchDatastore_Task, 'duration_secs': 0.019306} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.842544] env[61852]: DEBUG oslo_concurrency.lockutils [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.842847] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] b0f8f7dd-e559-43be-b541-c3da48a07d68/b0f8f7dd-e559-43be-b541-c3da48a07d68.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 842.843155] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b3682ebd-1513-4ba1-8ebe-0dc665f5f27e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.849377] env[61852]: DEBUG oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 842.849377] env[61852]: value = "task-1292941" [ 842.849377] env[61852]: _type = "Task" [ 842.849377] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.861341] env[61852]: DEBUG oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292941, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.888607] env[61852]: DEBUG nova.compute.utils [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 842.891984] env[61852]: DEBUG nova.compute.manager [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Allocating IP information in the background. 
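The CopyVirtualDisk source and destination in these records follow a fixed layout: a cached template at devstack-image-cache_base/<image-id>/<image-id>.vmdk and a per-instance root disk at <instance-uuid>/<instance-uuid>.vmdk, both written in vSphere's '[datastore] relative/path' notation. A small sketch of those path builders (the helper names are assumptions for illustration, not Nova's):

    def ds_path(datastore, *parts):
        # vSphere datastore path notation: "[datastore1] a/b/c.vmdk"
        return f"[{datastore}] " + "/".join(parts)

    def cached_image_vmdk(datastore, image_id,
                          cache_dir="devstack-image-cache_base"):
        return ds_path(datastore, cache_dir, image_id, f"{image_id}.vmdk")

    def instance_root_vmdk(datastore, instance_uuid):
        return ds_path(datastore, instance_uuid, f"{instance_uuid}.vmdk")

    src = cached_image_vmdk("datastore1", "90fd8f39-16b3-43e0-a682-0ec131005e31")
    dst = instance_root_vmdk("datastore1", "b0f8f7dd-e559-43be-b541-c3da48a07d68")
    print(f"Copying Virtual Disk {src} to {dst}")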
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 842.892149] env[61852]: DEBUG nova.network.neutron [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 842.936521] env[61852]: DEBUG nova.policy [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f32481dd157a4c8fa5fd454246927b89', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '141420cfd45c49858f63c00ec065f245', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 842.966884] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Creating linked-clone VM from snapshot {{(pid=61852) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 842.967273] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-5cf1e123-76a6-485d-bd00-634d6a7c5b14 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.976846] env[61852]: DEBUG oslo_vmware.api [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 842.976846] env[61852]: value = "task-1292942" [ 842.976846] env[61852]: _type = "Task" [ 842.976846] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.987134] env[61852]: DEBUG oslo_vmware.api [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1292942, 'name': CloneVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.033719] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292939, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.036943] env[61852]: DEBUG nova.network.neutron [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Updating instance_info_cache with network_info: [{"id": "9e5204e6-6870-43d3-986f-9ca080104e14", "address": "fa:16:3e:9f:59:1b", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e5204e6-68", "ovs_interfaceid": "9e5204e6-6870-43d3-986f-9ca080104e14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b3f3d9b5-9c27-4415-b02c-58c0b1133727", "address": "fa:16:3e:ab:83:17", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3f3d9b5-9c", "ovs_interfaceid": "b3f3d9b5-9c27-4415-b02c-58c0b1133727", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "669836ae-c7e6-440f-b9bf-84b0d95a595e", "address": "fa:16:3e:ca:c9:ba", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap669836ae-c7", "ovs_interfaceid": "669836ae-c7e6-440f-b9bf-84b0d95a595e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.073202] env[61852]: DEBUG oslo_concurrency.lockutils [None req-acd17eaf-753c-4f18-8d59-9aa6d6f1a07b tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "d93b8055-1eb2-4368-a051-289dc5a9d0ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 103.659s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.204560] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292940, 'name': Rename_Task, 'duration_secs': 0.152416} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.205149] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 843.205479] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2d663328-6dc1-4280-ac07-9dd876879719 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.214664] env[61852]: DEBUG nova.network.neutron [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Successfully created port: 2dae9519-f301-4529-81ef-5ee1ee9c0718 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 843.218160] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Waiting for the task: (returnval){ [ 843.218160] env[61852]: value = "task-1292943" [ 843.218160] env[61852]: _type = "Task" [ 843.218160] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.231434] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292943, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.237970] env[61852]: DEBUG oslo_concurrency.lockutils [req-e6c9cbde-8359-4494-a910-c9f096511e3b req-8aa015c9-5e44-43a1-b043-378c8cecdc17 service nova] Releasing lock "refresh_cache-f48b40ab-23f2-4071-8168-e7e2411ad64d" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.361153] env[61852]: DEBUG oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292941, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.393126] env[61852]: DEBUG nova.compute.manager [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 843.496413] env[61852]: DEBUG oslo_vmware.api [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1292942, 'name': CloneVM_Task} progress is 94%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.535421] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292939, 'name': CreateVM_Task, 'duration_secs': 0.567463} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.535601] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 843.536373] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 843.536503] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.536828] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 843.539315] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d4b3f69-8636-4366-9e01-0f5519abc018 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.541383] env[61852]: DEBUG 
oslo_concurrency.lockutils [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Releasing lock "refresh_cache-d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.542021] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 843.542126] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquired lock "d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.543106] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-382b6d6e-a60f-4b22-8331-3eb79d2d2329 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.560084] env[61852]: DEBUG nova.virt.hardware [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 843.560350] env[61852]: DEBUG nova.virt.hardware [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 843.560512] env[61852]: DEBUG nova.virt.hardware [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 843.560698] env[61852]: DEBUG nova.virt.hardware [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 843.560849] env[61852]: DEBUG nova.virt.hardware [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 843.560998] env[61852]: DEBUG nova.virt.hardware [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 
tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 843.561227] env[61852]: DEBUG nova.virt.hardware [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 843.561389] env[61852]: DEBUG nova.virt.hardware [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 843.561587] env[61852]: DEBUG nova.virt.hardware [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 843.561724] env[61852]: DEBUG nova.virt.hardware [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 843.561895] env[61852]: DEBUG nova.virt.hardware [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 843.568687] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Reconfiguring VM to attach interface {{(pid=61852) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 843.571893] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ca9cc34-a509-45d0-b31f-9d0d9b6deead {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.583642] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Waiting for the task: (returnval){ [ 843.583642] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d2f72f-ebb2-21a2-c613-754f6b4c75bc" [ 843.583642] env[61852]: _type = "Task" [ 843.583642] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.590023] env[61852]: DEBUG oslo_vmware.api [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 843.590023] env[61852]: value = "task-1292944" [ 843.590023] env[61852]: _type = "Task" [ 843.590023] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.593731] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d2f72f-ebb2-21a2-c613-754f6b4c75bc, 'name': SearchDatastore_Task, 'duration_secs': 0.011089} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.598941] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.599207] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 843.599436] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 843.599587] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.599767] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 843.600214] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9c13221e-41b8-4573-883e-22cbf55af5e9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.607365] env[61852]: DEBUG oslo_vmware.api [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1292944, 
'name': ReconfigVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.608381] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 843.608558] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 843.609283] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21f553ff-26bb-4d2f-9b15-ea866f2b1c57 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.616182] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Waiting for the task: (returnval){ [ 843.616182] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52614c25-b106-14b0-5325-32b3474f3188" [ 843.616182] env[61852]: _type = "Task" [ 843.616182] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.625370] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52614c25-b106-14b0-5325-32b3474f3188, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.725233] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46fff45c-ba0e-4249-860b-a7800dba9224 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.737368] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292943, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.740443] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5a45766-4ef5-4224-be51-147f466ae7d5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.773321] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13262cb1-cf96-4762-ae7d-e9d0a91b4a6e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.781546] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a660299c-b560-4157-a38e-d8c6b794d41c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.796753] env[61852]: DEBUG nova.compute.provider_tree [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 843.863354] env[61852]: DEBUG oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292941, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.67976} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.863778] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] b0f8f7dd-e559-43be-b541-c3da48a07d68/b0f8f7dd-e559-43be-b541-c3da48a07d68.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 843.864098] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 843.864477] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-25c17315-0c3b-4faf-bbbf-f3cbe8fbcb89 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.871883] env[61852]: DEBUG oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 843.871883] env[61852]: value = "task-1292945" [ 843.871883] env[61852]: _type = "Task" [ 843.871883] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.880184] env[61852]: DEBUG oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292945, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.985986] env[61852]: DEBUG nova.compute.manager [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 843.986813] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30a47ed2-c962-4b33-926b-b78336e82d59 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.994921] env[61852]: DEBUG oslo_vmware.api [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1292942, 'name': CloneVM_Task} progress is 94%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.085984] env[61852]: DEBUG nova.compute.manager [req-18baa673-7af9-4db4-9ea8-65c88fa84963 req-9de8a42c-b709-49a9-a810-23f8128c8a2f service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Received event network-changed-669836ae-c7e6-440f-b9bf-84b0d95a595e {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 844.086268] env[61852]: DEBUG nova.compute.manager [req-18baa673-7af9-4db4-9ea8-65c88fa84963 req-9de8a42c-b709-49a9-a810-23f8128c8a2f service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Refreshing instance network info cache due to event network-changed-669836ae-c7e6-440f-b9bf-84b0d95a595e. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 844.086496] env[61852]: DEBUG oslo_concurrency.lockutils [req-18baa673-7af9-4db4-9ea8-65c88fa84963 req-9de8a42c-b709-49a9-a810-23f8128c8a2f service nova] Acquiring lock "refresh_cache-d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.086684] env[61852]: DEBUG oslo_concurrency.lockutils [req-18baa673-7af9-4db4-9ea8-65c88fa84963 req-9de8a42c-b709-49a9-a810-23f8128c8a2f service nova] Acquired lock "refresh_cache-d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.086877] env[61852]: DEBUG nova.network.neutron [req-18baa673-7af9-4db4-9ea8-65c88fa84963 req-9de8a42c-b709-49a9-a810-23f8128c8a2f service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Refreshing network info cache for port 669836ae-c7e6-440f-b9bf-84b0d95a595e {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 844.106243] env[61852]: DEBUG oslo_vmware.api [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1292944, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.126665] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52614c25-b106-14b0-5325-32b3474f3188, 'name': SearchDatastore_Task, 'duration_secs': 0.009108} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.127522] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f35f84b6-1f78-4a5d-957b-129b99626e39 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.132643] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Waiting for the task: (returnval){ [ 844.132643] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529ae1e6-1990-bfc8-ae51-4f0600fb5953" [ 844.132643] env[61852]: _type = "Task" [ 844.132643] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.140392] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529ae1e6-1990-bfc8-ae51-4f0600fb5953, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.229652] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292943, 'name': PowerOnVM_Task, 'duration_secs': 0.717257} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.229896] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 844.230138] env[61852]: INFO nova.compute.manager [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Took 8.61 seconds to spawn the instance on the hypervisor. 
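A minimal sketch of the two oslo patterns the entries above record, assuming a reachable vCenter and placeholder credentials (none of the literal values below come from this deployment): oslo_concurrency.lockutils.lock() is the context manager behind the Acquiring/Acquired/Releasing lock lines (lockutils.py:310/313/331), and VMwareAPISession.wait_for_task() is the _poll_task loop that logs "progress is N%" until a task such as PowerOnVM_Task reports completed successfully. This is not Nova's actual code, only the library calls it wraps.

    from oslo_concurrency import lockutils
    from oslo_vmware import api, vim_util

    # Placeholder endpoint and credentials (assumptions); task_poll_interval
    # controls how often _poll_task emits a progress line.
    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=2, task_poll_interval=0.5)

    # Illustrative moref; Nova resolves real ones via PropertyCollector.
    vm_ref = vim_util.get_moref('vm-277358', 'VirtualMachine')

    # Logs "Acquiring lock ..." / "Releasing lock ..." at DEBUG, the same
    # lockutils lines seen above, while serializing image-cache access.
    # The lock name is illustrative; the image id is left unspecified.
    with lockutils.lock('[datastore1] devstack-image-cache_base/<image-id>'):
        pass  # cache work (SearchDatastore_Task, CopyVirtualDisk_Task, ...)

    # Start a vCenter task and block until it finishes; the polling is what
    # produces the "PowerOnVM_Task} progress is N%." entries.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)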
[ 844.230346] env[61852]: DEBUG nova.compute.manager [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 844.231189] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99ca5bc4-eef2-4153-baf1-19700ed410b4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.300064] env[61852]: DEBUG nova.scheduler.client.report [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 844.381837] env[61852]: DEBUG oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292945, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.162106} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.382095] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 844.384026] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06aae720-0d8a-474e-9276-9203177d6198 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.405609] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] b0f8f7dd-e559-43be-b541-c3da48a07d68/b0f8f7dd-e559-43be-b541-c3da48a07d68.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 844.406944] env[61852]: DEBUG nova.compute.manager [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 844.408918] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8881c81c-a703-433e-872e-e80cb1f027ba {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.430969] env[61852]: DEBUG oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 844.430969] env[61852]: value = "task-1292946" [ 844.430969] env[61852]: _type = "Task" [ 844.430969] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.439530] env[61852]: DEBUG oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292946, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.448035] env[61852]: DEBUG nova.virt.hardware [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 844.448035] env[61852]: DEBUG nova.virt.hardware [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 844.448233] env[61852]: DEBUG nova.virt.hardware [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 844.448298] env[61852]: DEBUG nova.virt.hardware [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 844.448452] env[61852]: DEBUG nova.virt.hardware [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 844.448604] env[61852]: DEBUG nova.virt.hardware [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 844.448814] env[61852]: DEBUG nova.virt.hardware [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 844.448979] env[61852]: DEBUG nova.virt.hardware [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 844.449171] env[61852]: DEBUG nova.virt.hardware [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 844.449337] env[61852]: DEBUG nova.virt.hardware [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 844.449514] env[61852]: DEBUG nova.virt.hardware [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 844.450704] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44bf6b6c-bc89-459f-b0ec-eced9dbfa57f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.459186] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8840788a-b993-4601-a1cd-e66280a08926 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.490688] env[61852]: DEBUG oslo_vmware.api [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1292942, 'name': CloneVM_Task, 'duration_secs': 1.379186} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.491010] env[61852]: INFO nova.virt.vmwareapi.vmops [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Created linked-clone VM from snapshot [ 844.491776] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1a5efc4-bf48-46e3-a353-139cb98f8fcb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.500207] env[61852]: INFO nova.compute.manager [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] instance snapshotting [ 844.502232] env[61852]: DEBUG nova.virt.vmwareapi.images [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Uploading image 8eadd208-fbd0-4fde-9723-395ea516a40e {{(pid=61852) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 844.505151] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db059005-8444-4ebc-a212-a94e532a6e46 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.508037] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "d93b8055-1eb2-4368-a051-289dc5a9d0ed" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.508311] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "d93b8055-1eb2-4368-a051-289dc5a9d0ed" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.508534] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "d93b8055-1eb2-4368-a051-289dc5a9d0ed-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.508732] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "d93b8055-1eb2-4368-a051-289dc5a9d0ed-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.508913] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 
tempest-ServersTestJSON-848945246-project-member] Lock "d93b8055-1eb2-4368-a051-289dc5a9d0ed-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.525309] env[61852]: INFO nova.compute.manager [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Terminating instance [ 844.529464] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0547f70e-a46c-42f6-b63c-0cac6e8e44a0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.532710] env[61852]: DEBUG nova.compute.manager [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 844.532955] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 844.534015] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb41cdd-0adc-40dd-ba56-dd1997edc2c4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.540011] env[61852]: DEBUG oslo_vmware.rw_handles [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 844.540011] env[61852]: value = "vm-277358" [ 844.540011] env[61852]: _type = "VirtualMachine" [ 844.540011] env[61852]: }. 
{{(pid=61852) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 844.540266] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-3be48e3d-c7a3-4bdb-a976-b2d827202a67 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.547278] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 844.547866] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2191b4e7-bbd2-4081-8191-aba2a5995c55 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.552822] env[61852]: DEBUG oslo_vmware.rw_handles [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lease: (returnval){ [ 844.552822] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]523e75e5-3ae3-05b9-9cf7-9f3cdc3ec9be" [ 844.552822] env[61852]: _type = "HttpNfcLease" [ 844.552822] env[61852]: } obtained for exporting VM: (result){ [ 844.552822] env[61852]: value = "vm-277358" [ 844.552822] env[61852]: _type = "VirtualMachine" [ 844.552822] env[61852]: }. {{(pid=61852) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 844.553062] env[61852]: DEBUG oslo_vmware.api [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the lease: (returnval){ [ 844.553062] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]523e75e5-3ae3-05b9-9cf7-9f3cdc3ec9be" [ 844.553062] env[61852]: _type = "HttpNfcLease" [ 844.553062] env[61852]: } to be ready. {{(pid=61852) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 844.554298] env[61852]: DEBUG oslo_vmware.api [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 844.554298] env[61852]: value = "task-1292948" [ 844.554298] env[61852]: _type = "Task" [ 844.554298] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.566210] env[61852]: DEBUG oslo_vmware.api [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1292948, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.567785] env[61852]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 844.567785] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]523e75e5-3ae3-05b9-9cf7-9f3cdc3ec9be" [ 844.567785] env[61852]: _type = "HttpNfcLease" [ 844.567785] env[61852]: } is initializing. 
{{(pid=61852) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 844.603431] env[61852]: DEBUG oslo_vmware.api [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1292944, 'name': ReconfigVM_Task, 'duration_secs': 0.63462} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.603990] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Releasing lock "d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.604254] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Reconfigured VM to attach interface {{(pid=61852) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 844.643331] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529ae1e6-1990-bfc8-ae51-4f0600fb5953, 'name': SearchDatastore_Task, 'duration_secs': 0.028601} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.643618] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.643954] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] f48b40ab-23f2-4071-8168-e7e2411ad64d/f48b40ab-23f2-4071-8168-e7e2411ad64d.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 844.646431] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-527bc9bb-7728-4cb8-86e7-9ede992763d7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.653714] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Waiting for the task: (returnval){ [ 844.653714] env[61852]: value = "task-1292949" [ 844.653714] env[61852]: _type = "Task" [ 844.653714] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.662644] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292949, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.752074] env[61852]: INFO nova.compute.manager [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Took 34.48 seconds to build instance. [ 844.774405] env[61852]: DEBUG nova.network.neutron [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Successfully updated port: 2dae9519-f301-4529-81ef-5ee1ee9c0718 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 844.804307] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.422s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.804872] env[61852]: DEBUG nova.compute.manager [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 844.807753] env[61852]: DEBUG oslo_concurrency.lockutils [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.757s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.809337] env[61852]: INFO nova.compute.claims [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 844.901583] env[61852]: DEBUG nova.network.neutron [req-18baa673-7af9-4db4-9ea8-65c88fa84963 req-9de8a42c-b709-49a9-a810-23f8128c8a2f service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Updated VIF entry in instance network info cache for port 669836ae-c7e6-440f-b9bf-84b0d95a595e. 
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 844.902092] env[61852]: DEBUG nova.network.neutron [req-18baa673-7af9-4db4-9ea8-65c88fa84963 req-9de8a42c-b709-49a9-a810-23f8128c8a2f service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Updating instance_info_cache with network_info: [{"id": "9e5204e6-6870-43d3-986f-9ca080104e14", "address": "fa:16:3e:9f:59:1b", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e5204e6-68", "ovs_interfaceid": "9e5204e6-6870-43d3-986f-9ca080104e14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b3f3d9b5-9c27-4415-b02c-58c0b1133727", "address": "fa:16:3e:ab:83:17", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3f3d9b5-9c", "ovs_interfaceid": "b3f3d9b5-9c27-4415-b02c-58c0b1133727", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "669836ae-c7e6-440f-b9bf-84b0d95a595e", "address": "fa:16:3e:ca:c9:ba", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap669836ae-c7", "ovs_interfaceid": "669836ae-c7e6-440f-b9bf-84b0d95a595e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.942543] env[61852]: DEBUG oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292946, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.050754] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Creating Snapshot of the VM instance {{(pid=61852) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 845.051126] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-274b618c-d8f7-41a5-8bc4-e20ae1cae31f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.064758] env[61852]: DEBUG oslo_vmware.api [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){ [ 845.064758] env[61852]: value = "task-1292950" [ 845.064758] env[61852]: _type = "Task" [ 845.064758] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.072310] env[61852]: DEBUG oslo_vmware.api [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1292948, 'name': PowerOffVM_Task, 'duration_secs': 0.215479} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.072543] env[61852]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 845.072543] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]523e75e5-3ae3-05b9-9cf7-9f3cdc3ec9be" [ 845.072543] env[61852]: _type = "HttpNfcLease" [ 845.072543] env[61852]: } is ready. 
{{(pid=61852) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 845.073274] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 845.073464] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 845.073835] env[61852]: DEBUG oslo_vmware.rw_handles [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 845.073835] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]523e75e5-3ae3-05b9-9cf7-9f3cdc3ec9be" [ 845.073835] env[61852]: _type = "HttpNfcLease" [ 845.073835] env[61852]: }. {{(pid=61852) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 845.074058] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-60521554-d50a-4a2e-8997-d7efda23bb0e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.076436] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a856225-8f47-40c0-8ffa-4b03d415903e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.082288] env[61852]: DEBUG oslo_vmware.api [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292950, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.088622] env[61852]: DEBUG oslo_vmware.rw_handles [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fdb36d-ca46-2af2-fde1-79440cd967d6/disk-0.vmdk from lease info. {{(pid=61852) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 845.088902] env[61852]: DEBUG oslo_vmware.rw_handles [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fdb36d-ca46-2af2-fde1-79440cd967d6/disk-0.vmdk for reading. 
{{(pid=61852) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 845.148902] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a021f535-a1bd-4c18-b6d7-6308e15d60e1 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "interface-d3922357-383f-4f7e-9c76-4eb688a092b9-669836ae-c7e6-440f-b9bf-84b0d95a595e" "released" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: held 6.443s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.163981] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292949, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.165633] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 845.165847] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 845.166086] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Deleting the datastore file [datastore1] d93b8055-1eb2-4368-a051-289dc5a9d0ed {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 845.167138] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5543301c-fd0b-4fd5-845d-06dfd5acd2b1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.173125] env[61852]: DEBUG oslo_vmware.api [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 845.173125] env[61852]: value = "task-1292952" [ 845.173125] env[61852]: _type = "Task" [ 845.173125] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.181447] env[61852]: DEBUG oslo_vmware.api [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1292952, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.234049] env[61852]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-099948d0-1df6-4f5c-a8f1-164d45f4ef69 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.256460] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lock "988c0a5c-b84d-44cf-9068-defd7132b0c9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 83.129s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.277337] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Acquiring lock "refresh_cache-aeaa2828-6d83-4b26-bd1c-5f654c70713f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.277492] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Acquired lock "refresh_cache-aeaa2828-6d83-4b26-bd1c-5f654c70713f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.277648] env[61852]: DEBUG nova.network.neutron [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 845.315799] env[61852]: DEBUG nova.compute.utils [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 845.318345] env[61852]: DEBUG nova.compute.manager [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Allocating IP information in the background. 
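[editor's note] The 'Lock "compute_resources" acquired/"released" ... waited/held Ns' entries above come from oslo.concurrency's lockutils wrapper, which times how long each caller waited for and held the lock. A minimal sketch of the pattern follows; the lock name matches the log, but the guarded function is a stand-in, not Nova's real ResourceTracker.

# Minimal sketch of oslo.concurrency locking. The decorator serializes
# callers on a shared in-process lock and logs waited/held durations.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim(instance):
    # Everything in here runs under the "compute_resources" lock.
    ...

# The same lock can also be taken explicitly as a context manager:
with lockutils.lock('compute_resources'):
    pass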
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 845.318434] env[61852]: DEBUG nova.network.neutron [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 845.360952] env[61852]: DEBUG nova.policy [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5e46b44ff06d4ff2b0670ef17b3ef6e3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ab258d1a24f3459d95421bcb84287f85', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 845.405175] env[61852]: DEBUG oslo_concurrency.lockutils [req-18baa673-7af9-4db4-9ea8-65c88fa84963 req-9de8a42c-b709-49a9-a810-23f8128c8a2f service nova] Releasing lock "refresh_cache-d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 845.443878] env[61852]: DEBUG oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292946, 'name': ReconfigVM_Task, 'duration_secs': 0.56778} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.444307] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Reconfigured VM instance instance-00000042 to attach disk [datastore1] b0f8f7dd-e559-43be-b541-c3da48a07d68/b0f8f7dd-e559-43be-b541-c3da48a07d68.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 845.445149] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-57ccfa95-6c30-4e40-9441-acc463f58668 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.451946] env[61852]: DEBUG oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 845.451946] env[61852]: value = "task-1292953" [ 845.451946] env[61852]: _type = "Task" [ 845.451946] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.461065] env[61852]: DEBUG oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292953, 'name': Rename_Task} progress is 5%. 
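[editor's note] The nova.policy entry above records an oslo.policy authorization failing for rule network:attach_external_network because the request credentials carry only the reader and member roles. A self-contained sketch of that kind of check follows; the enforcer wiring and the default rule are illustrative assumptions, not Nova's actual policy configuration.

# Minimal sketch of an oslo.policy rule check. The rule name and roles
# mirror the log; the 'role:admin' default is an assumption.
from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

creds = {'roles': ['reader', 'member'],
         'project_id': 'ab258d1a24f3459d95421bcb84287f85'}

# Returns False here: the credentials carry no 'admin' role, mirroring
# the failed policy check in the log.
allowed = enforcer.enforce('network:attach_external_network', {}, creds)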
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.575246] env[61852]: DEBUG oslo_vmware.api [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292950, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.666232] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292949, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.522053} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.666420] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] f48b40ab-23f2-4071-8168-e7e2411ad64d/f48b40ab-23f2-4071-8168-e7e2411ad64d.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 845.667036] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 845.667157] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-605c58d2-c8b2-4427-a242-ca4b9e1aa70d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.678416] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Waiting for the task: (returnval){ [ 845.678416] env[61852]: value = "task-1292954" [ 845.678416] env[61852]: _type = "Task" [ 845.678416] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.685737] env[61852]: DEBUG oslo_vmware.api [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1292952, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.198364} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.686507] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 845.686777] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 845.687073] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 845.687321] env[61852]: INFO nova.compute.manager [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Took 1.15 seconds to destroy the instance on the hypervisor. [ 845.687642] env[61852]: DEBUG oslo.service.loopingcall [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 845.687885] env[61852]: DEBUG nova.compute.manager [-] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 845.688182] env[61852]: DEBUG nova.network.neutron [-] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 845.693273] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292954, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.819292] env[61852]: DEBUG nova.compute.manager [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 845.842407] env[61852]: DEBUG nova.network.neutron [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Instance cache missing network info. 
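[editor's note] The oslo.service.loopingcall entry above ("Waiting for function ..._deallocate_network_with_retries to return") shows network deallocation being retried through a looping call. Below is a minimal sketch of the underlying primitive only; Nova's real retry wrapper is more involved, and try_deallocate() is a hypothetical stand-in for the actual work.

# Minimal sketch of oslo.service's looping-call pattern: poll a
# function at a fixed interval until it signals completion.
from oslo_service import loopingcall

def try_deallocate():
    # Hypothetical stand-in for the real deallocation attempt.
    return True

def _deallocate_network_with_retries():
    if try_deallocate():
        # Raising LoopingCallDone stops the loop and sets the result.
        raise loopingcall.LoopingCallDone(retvalue=True)

timer = loopingcall.FixedIntervalLoopingCall(
    _deallocate_network_with_retries)
# start() schedules the loop; wait() blocks until LoopingCallDone,
# which is when the "Waiting for function ..." log line resolves.
timer.start(interval=2.0).wait()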
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 845.961281] env[61852]: DEBUG oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292953, 'name': Rename_Task, 'duration_secs': 0.243147} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.964341] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 845.964903] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b588ccb4-5e7f-47cc-8a28-2961673f1f16 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.970826] env[61852]: DEBUG oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 845.970826] env[61852]: value = "task-1292955" [ 845.970826] env[61852]: _type = "Task" [ 845.970826] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.984046] env[61852]: DEBUG oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292955, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.992638] env[61852]: DEBUG nova.network.neutron [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Successfully created port: 3d08b2a9-48bc-4f9f-bf97-a408e60853b0 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 846.077994] env[61852]: DEBUG oslo_vmware.api [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292950, 'name': CreateSnapshot_Task, 'duration_secs': 0.864761} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.082307] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Created Snapshot of the VM instance {{(pid=61852) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 846.083344] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f42d8fc-d97c-4e23-8b2c-36992d0fe8e0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.088015] env[61852]: DEBUG nova.network.neutron [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Updating instance_info_cache with network_info: [{"id": "2dae9519-f301-4529-81ef-5ee1ee9c0718", "address": "fa:16:3e:e2:84:99", "network": {"id": "afd44fc3-b7a6-40ec-a4a2-799f40399d17", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-531054454-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "141420cfd45c49858f63c00ec065f245", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2f5e5e2-e460-49ce-aa24-232e4a8007af", "external-id": "nsx-vlan-transportzone-503", "segmentation_id": 503, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2dae9519-f3", "ovs_interfaceid": "2dae9519-f301-4529-81ef-5ee1ee9c0718", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.176830] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a69eedb9-9983-455a-b95d-eb7f3a39d4e7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.196083] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22803254-c3c2-4014-8818-120688fdd5ce {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.199908] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292954, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.288354} completed successfully. 
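[editor's note] The large "Updating instance_info_cache with network_info: [...]" payloads above are lists of JSON-serializable VIF dicts. A small sketch of walking one such entry for its addresses follows; the literal is trimmed to the fields of interest, taken from the structure shown in the log.

# Minimal sketch: extract fixed and floating IPs from a cached
# network_info entry of the shape logged above.
network_info = [{
    "id": "2dae9519-f301-4529-81ef-5ee1ee9c0718",
    "address": "fa:16:3e:e2:84:99",
    "network": {
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.6",
                     "type": "fixed",
                     "floating_ips": []}],
        }],
    },
}]

for vif in network_info:
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            floats = [f["address"] for f in ip.get("floating_ips", [])]
            print(vif["id"], ip["address"], ip["type"], floats)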
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.201031] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 846.201968] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fc050b9-e66e-4945-924a-c46f4b06f668 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.230027] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84a88f05-3bf6-4ae6-a92c-e2e864b3947f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.255289] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] f48b40ab-23f2-4071-8168-e7e2411ad64d/f48b40ab-23f2-4071-8168-e7e2411ad64d.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 846.257987] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-11791c6c-5317-4dd1-a637-9bd2e9674ec2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.274765] env[61852]: DEBUG nova.compute.manager [req-1a57f473-bf28-402d-9442-24d019a1cd76 req-46020201-33e6-41fd-8cd3-89847f2cd42d service nova] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Received event network-vif-plugged-2dae9519-f301-4529-81ef-5ee1ee9c0718 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 846.274986] env[61852]: DEBUG oslo_concurrency.lockutils [req-1a57f473-bf28-402d-9442-24d019a1cd76 req-46020201-33e6-41fd-8cd3-89847f2cd42d service nova] Acquiring lock "aeaa2828-6d83-4b26-bd1c-5f654c70713f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.275226] env[61852]: DEBUG oslo_concurrency.lockutils [req-1a57f473-bf28-402d-9442-24d019a1cd76 req-46020201-33e6-41fd-8cd3-89847f2cd42d service nova] Lock "aeaa2828-6d83-4b26-bd1c-5f654c70713f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.275403] env[61852]: DEBUG oslo_concurrency.lockutils [req-1a57f473-bf28-402d-9442-24d019a1cd76 req-46020201-33e6-41fd-8cd3-89847f2cd42d service nova] Lock "aeaa2828-6d83-4b26-bd1c-5f654c70713f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.275574] env[61852]: DEBUG nova.compute.manager [req-1a57f473-bf28-402d-9442-24d019a1cd76 req-46020201-33e6-41fd-8cd3-89847f2cd42d service nova] [instance: 
aeaa2828-6d83-4b26-bd1c-5f654c70713f] No waiting events found dispatching network-vif-plugged-2dae9519-f301-4529-81ef-5ee1ee9c0718 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 846.275818] env[61852]: WARNING nova.compute.manager [req-1a57f473-bf28-402d-9442-24d019a1cd76 req-46020201-33e6-41fd-8cd3-89847f2cd42d service nova] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Received unexpected event network-vif-plugged-2dae9519-f301-4529-81ef-5ee1ee9c0718 for instance with vm_state building and task_state spawning. [ 846.276009] env[61852]: DEBUG nova.compute.manager [req-1a57f473-bf28-402d-9442-24d019a1cd76 req-46020201-33e6-41fd-8cd3-89847f2cd42d service nova] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Received event network-changed-2dae9519-f301-4529-81ef-5ee1ee9c0718 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 846.276173] env[61852]: DEBUG nova.compute.manager [req-1a57f473-bf28-402d-9442-24d019a1cd76 req-46020201-33e6-41fd-8cd3-89847f2cd42d service nova] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Refreshing instance network info cache due to event network-changed-2dae9519-f301-4529-81ef-5ee1ee9c0718. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 846.276344] env[61852]: DEBUG oslo_concurrency.lockutils [req-1a57f473-bf28-402d-9442-24d019a1cd76 req-46020201-33e6-41fd-8cd3-89847f2cd42d service nova] Acquiring lock "refresh_cache-aeaa2828-6d83-4b26-bd1c-5f654c70713f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.281193] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a7e069a-f143-4c9e-bf89-2da2d5fb74a6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.287247] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Waiting for the task: (returnval){ [ 846.287247] env[61852]: value = "task-1292956" [ 846.287247] env[61852]: _type = "Task" [ 846.287247] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.299057] env[61852]: DEBUG nova.compute.provider_tree [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 846.306621] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292956, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.481670] env[61852]: DEBUG oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292955, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.592768] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Releasing lock "refresh_cache-aeaa2828-6d83-4b26-bd1c-5f654c70713f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.593195] env[61852]: DEBUG nova.compute.manager [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Instance network_info: |[{"id": "2dae9519-f301-4529-81ef-5ee1ee9c0718", "address": "fa:16:3e:e2:84:99", "network": {"id": "afd44fc3-b7a6-40ec-a4a2-799f40399d17", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-531054454-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "141420cfd45c49858f63c00ec065f245", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2f5e5e2-e460-49ce-aa24-232e4a8007af", "external-id": "nsx-vlan-transportzone-503", "segmentation_id": 503, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2dae9519-f3", "ovs_interfaceid": "2dae9519-f301-4529-81ef-5ee1ee9c0718", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 846.593648] env[61852]: DEBUG oslo_concurrency.lockutils [req-1a57f473-bf28-402d-9442-24d019a1cd76 req-46020201-33e6-41fd-8cd3-89847f2cd42d service nova] Acquired lock "refresh_cache-aeaa2828-6d83-4b26-bd1c-5f654c70713f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.593920] env[61852]: DEBUG nova.network.neutron [req-1a57f473-bf28-402d-9442-24d019a1cd76 req-46020201-33e6-41fd-8cd3-89847f2cd42d service nova] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Refreshing network info cache for port 2dae9519-f301-4529-81ef-5ee1ee9c0718 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 846.595618] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e2:84:99', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd2f5e5e2-e460-49ce-aa24-232e4a8007af', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2dae9519-f301-4529-81ef-5ee1ee9c0718', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 846.604735] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 
tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Creating folder: Project (141420cfd45c49858f63c00ec065f245). Parent ref: group-v277280. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 846.614039] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Creating linked-clone VM from snapshot {{(pid=61852) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 846.614485] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ec592619-796e-4106-adc1-163be0f0fd40 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.617461] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-70dc400a-8eb1-48a6-9856-9e46644d7d68 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.622101] env[61852]: DEBUG nova.network.neutron [-] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.632749] env[61852]: DEBUG oslo_vmware.api [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){ [ 846.632749] env[61852]: value = "task-1292958" [ 846.632749] env[61852]: _type = "Task" [ 846.632749] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.639657] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Created folder: Project (141420cfd45c49858f63c00ec065f245) in parent group-v277280. [ 846.639657] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Creating folder: Instances. Parent ref: group-v277360. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 846.639657] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2a3cf46f-c2a0-46ee-b036-88125c93804a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.645604] env[61852]: DEBUG oslo_vmware.api [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292958, 'name': CloneVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.649180] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Created folder: Instances in parent group-v277360. 
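[editor's note] The "Creating folder: Project (...). Parent ref: group-v277280" and "Created folder: Instances in parent group-v277360" entries above correspond to vSphere Folder.CreateFolder calls. Below is a sketch of issuing one through an existing oslo.vmware session (see the earlier task sketch); the moref lookup and the duplicate-name handling are assumptions about typical usage, not Nova's exact code path.

# Minimal sketch: create a child folder under a known Folder moref.
# Assumes 'session' is an oslo.vmware VMwareAPISession built earlier.
from oslo_vmware import exceptions as vexc
from oslo_vmware import vim_util

parent_ref = vim_util.get_moref('group-v277280', 'Folder')
try:
    # CreateFolder returns the managed object reference of the new
    # child folder on success.
    child_ref = session.invoke_api(
        session.vim, 'CreateFolder', parent_ref, name='Instances')
except vexc.DuplicateName:
    # The folder already exists; callers typically look it up instead
    # (cf. "Extension org.openstack.compute already exists"-style
    # idempotent handling elsewhere in these logs).
    child_ref = None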
[ 846.649858] env[61852]: DEBUG oslo.service.loopingcall [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 846.649858] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 846.649858] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fed1be81-61a4-412b-8dec-e571c1947a42 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.669316] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 846.669316] env[61852]: value = "task-1292960" [ 846.669316] env[61852]: _type = "Task" [ 846.669316] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.677885] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292960, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.797755] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292956, 'name': ReconfigVM_Task, 'duration_secs': 0.2724} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.798153] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Reconfigured VM instance instance-00000046 to attach disk [datastore1] f48b40ab-23f2-4071-8168-e7e2411ad64d/f48b40ab-23f2-4071-8168-e7e2411ad64d.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 846.798853] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-83f9580b-64fd-467c-9882-dd7649ef7cfb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.803022] env[61852]: DEBUG nova.scheduler.client.report [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 846.806992] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Waiting for the 
task: (returnval){ [ 846.806992] env[61852]: value = "task-1292961" [ 846.806992] env[61852]: _type = "Task" [ 846.806992] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.819268] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292961, 'name': Rename_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.833941] env[61852]: DEBUG nova.compute.manager [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 846.865368] env[61852]: DEBUG nova.virt.hardware [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 846.865655] env[61852]: DEBUG nova.virt.hardware [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 846.865823] env[61852]: DEBUG nova.virt.hardware [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 846.866058] env[61852]: DEBUG nova.virt.hardware [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 846.866214] env[61852]: DEBUG nova.virt.hardware [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 846.866365] env[61852]: DEBUG nova.virt.hardware [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Chose sockets=0, cores=0, threads=0; limits were 
sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 846.866579] env[61852]: DEBUG nova.virt.hardware [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 846.866743] env[61852]: DEBUG nova.virt.hardware [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 846.866916] env[61852]: DEBUG nova.virt.hardware [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 846.867098] env[61852]: DEBUG nova.virt.hardware [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 846.867278] env[61852]: DEBUG nova.virt.hardware [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 846.868240] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5feecb93-6d3b-4af1-8181-141fd5833e83 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.882050] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-667288d1-5924-4fda-bb3b-ead560743b32 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.981635] env[61852]: DEBUG oslo_vmware.api [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292955, 'name': PowerOnVM_Task, 'duration_secs': 0.672127} completed successfully. 
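[editor's note] The nova.virt.hardware entries above walk from flavor and image limits ("limits were sockets=65536, cores=65536, threads=65536") down to "[VirtCPUTopology(cores=1,sockets=1,threads=1)]" for the 1-vCPU flavor. Below is a simplified sketch of that counting step only; Nova's real logic also merges flavor/image preferences and sorts the candidates.

# Simplified sketch: enumerate (sockets, cores, threads) triples whose
# product equals the vCPU count, capped by the logged limits.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    topologies = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    topologies.append((s, c, t))
    return topologies

# For the 1-vCPU flavor traced above this yields [(1, 1, 1)],
# matching "Got 1 possible topologies".
print(possible_topologies(1))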
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.981960] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 846.982166] env[61852]: DEBUG nova.compute.manager [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 846.983028] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4135060-02d1-48f1-a948-e08b81392f81 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.125393] env[61852]: INFO nova.compute.manager [-] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Took 1.44 seconds to deallocate network for instance. [ 847.144553] env[61852]: DEBUG oslo_vmware.api [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292958, 'name': CloneVM_Task} progress is 94%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.180142] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292960, 'name': CreateVM_Task, 'duration_secs': 0.377561} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.180142] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 847.180953] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.181144] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.181492] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 847.181767] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93b42798-7729-42a9-9003-1b119bc66d71 {{(pid=61852) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.187026] env[61852]: DEBUG oslo_vmware.api [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Waiting for the task: (returnval){ [ 847.187026] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52998a99-1998-702b-56a4-24be6ce7f616" [ 847.187026] env[61852]: _type = "Task" [ 847.187026] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.195726] env[61852]: DEBUG oslo_vmware.api [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52998a99-1998-702b-56a4-24be6ce7f616, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.309111] env[61852]: DEBUG oslo_concurrency.lockutils [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.502s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.309656] env[61852]: DEBUG nova.compute.manager [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 847.312388] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.910s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.312759] env[61852]: DEBUG nova.objects.instance [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Lazy-loading 'resources' on Instance uuid e265a4be-7b37-40b5-a199-42a7cd945f66 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 847.323924] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292961, 'name': Rename_Task, 'duration_secs': 0.138591} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.324824] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 847.325231] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9442dfb4-4c94-4cd8-9920-08a57f2c3056 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.332675] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Waiting for the task: (returnval){ [ 847.332675] env[61852]: value = "task-1292962" [ 847.332675] env[61852]: _type = "Task" [ 847.332675] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.339270] env[61852]: DEBUG nova.network.neutron [req-1a57f473-bf28-402d-9442-24d019a1cd76 req-46020201-33e6-41fd-8cd3-89847f2cd42d service nova] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Updated VIF entry in instance network info cache for port 2dae9519-f301-4529-81ef-5ee1ee9c0718. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 847.339584] env[61852]: DEBUG nova.network.neutron [req-1a57f473-bf28-402d-9442-24d019a1cd76 req-46020201-33e6-41fd-8cd3-89847f2cd42d service nova] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Updating instance_info_cache with network_info: [{"id": "2dae9519-f301-4529-81ef-5ee1ee9c0718", "address": "fa:16:3e:e2:84:99", "network": {"id": "afd44fc3-b7a6-40ec-a4a2-799f40399d17", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-531054454-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "141420cfd45c49858f63c00ec065f245", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2f5e5e2-e460-49ce-aa24-232e4a8007af", "external-id": "nsx-vlan-transportzone-503", "segmentation_id": 503, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2dae9519-f3", "ovs_interfaceid": "2dae9519-f301-4529-81ef-5ee1ee9c0718", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.343652] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292962, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.376380] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe4276cd-dc2b-4919-a432-bea3a809a392 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "interface-d3922357-383f-4f7e-9c76-4eb688a092b9-b3f3d9b5-9c27-4415-b02c-58c0b1133727" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.376957] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe4276cd-dc2b-4919-a432-bea3a809a392 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "interface-d3922357-383f-4f7e-9c76-4eb688a092b9-b3f3d9b5-9c27-4415-b02c-58c0b1133727" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.503319] env[61852]: DEBUG oslo_concurrency.lockutils [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.632794] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.644247] env[61852]: DEBUG oslo_vmware.api [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292958, 'name': CloneVM_Task} progress is 94%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.699721] env[61852]: DEBUG oslo_vmware.api [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52998a99-1998-702b-56a4-24be6ce7f616, 'name': SearchDatastore_Task, 'duration_secs': 0.010196} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.700295] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 847.700684] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 847.701080] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.701380] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.701988] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 847.702450] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-871c64b7-ede6-49bf-bd67-797f1c98fd49 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.711184] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 847.714019] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 847.714019] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b823639-8087-49c0-b5bc-436e50926722 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.719168] env[61852]: DEBUG oslo_vmware.api [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Waiting for the task: (returnval){ [ 847.719168] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52185e7b-a89a-c5ae-8f2a-5c151a9c0913" [ 847.719168] env[61852]: _type = "Task" [ 847.719168] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.728567] env[61852]: DEBUG oslo_vmware.api [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52185e7b-a89a-c5ae-8f2a-5c151a9c0913, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.822037] env[61852]: DEBUG nova.compute.utils [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 847.825441] env[61852]: DEBUG nova.compute.manager [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 847.826188] env[61852]: DEBUG nova.network.neutron [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 847.846310] env[61852]: DEBUG oslo_concurrency.lockutils [req-1a57f473-bf28-402d-9442-24d019a1cd76 req-46020201-33e6-41fd-8cd3-89847f2cd42d service nova] Releasing lock "refresh_cache-aeaa2828-6d83-4b26-bd1c-5f654c70713f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 847.846965] env[61852]: DEBUG oslo_vmware.api [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292962, 'name': PowerOnVM_Task, 'duration_secs': 0.470986} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.852272] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 847.852272] env[61852]: INFO nova.compute.manager [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Took 7.84 seconds to spawn the instance on the hypervisor. [ 847.852272] env[61852]: DEBUG nova.compute.manager [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 847.852272] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0af56542-a49e-43b4-aae3-89d0ccf23927 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.878043] env[61852]: DEBUG nova.policy [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7422e3984ccc486dbfc98aa24a9295da', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1fdd2d4aeb954b6fae049090b32f657b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 847.882572] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe4276cd-dc2b-4919-a432-bea3a809a392 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.882572] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe4276cd-dc2b-4919-a432-bea3a809a392 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquired lock "d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.883071] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e772cf59-d5b4-4889-996f-b6f3b5238b28 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.907665] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "b0f8f7dd-e559-43be-b541-c3da48a07d68" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.908061] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "b0f8f7dd-e559-43be-b541-c3da48a07d68" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.908421] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "b0f8f7dd-e559-43be-b541-c3da48a07d68-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.908717] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "b0f8f7dd-e559-43be-b541-c3da48a07d68-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.909036] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "b0f8f7dd-e559-43be-b541-c3da48a07d68-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.914103] env[61852]: INFO nova.compute.manager [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Terminating instance [ 847.916622] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e22999-0023-4130-a414-5bd957309e1f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.921463] env[61852]: DEBUG nova.compute.manager [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 847.921776] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 847.923245] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b35e42f9-c4df-4722-97e9-1e6c51423f7b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.932812] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 847.959022] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f95535fe-66af-447a-867c-3c469fdf4381 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.964710] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fe4276cd-dc2b-4919-a432-bea3a809a392 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Reconfiguring VM to detach interface {{(pid=61852) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 847.967718] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f09bfb1-48b6-44d5-97e8-3958238c8648 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.987608] env[61852]: DEBUG oslo_vmware.api [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 847.987608] env[61852]: value = "task-1292963" [ 847.987608] env[61852]: _type = "Task" [ 847.987608] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.989199] env[61852]: DEBUG oslo_vmware.api [None req-fe4276cd-dc2b-4919-a432-bea3a809a392 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 847.989199] env[61852]: value = "task-1292964" [ 847.989199] env[61852]: _type = "Task" [ 847.989199] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.007212] env[61852]: DEBUG oslo_vmware.api [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292963, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.007504] env[61852]: DEBUG oslo_vmware.api [None req-fe4276cd-dc2b-4919-a432-bea3a809a392 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1292964, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.040417] env[61852]: DEBUG nova.compute.manager [req-a6c51530-0648-44ce-9638-84302ecea584 req-bd32ac50-ad65-41be-9fbf-124088aa4f41 service nova] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Received event network-vif-plugged-3d08b2a9-48bc-4f9f-bf97-a408e60853b0 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 848.040804] env[61852]: DEBUG oslo_concurrency.lockutils [req-a6c51530-0648-44ce-9638-84302ecea584 req-bd32ac50-ad65-41be-9fbf-124088aa4f41 service nova] Acquiring lock "21d74604-6a64-44ee-a012-ebff7166853e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.040967] env[61852]: DEBUG oslo_concurrency.lockutils [req-a6c51530-0648-44ce-9638-84302ecea584 req-bd32ac50-ad65-41be-9fbf-124088aa4f41 service nova] Lock "21d74604-6a64-44ee-a012-ebff7166853e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.041083] env[61852]: DEBUG oslo_concurrency.lockutils [req-a6c51530-0648-44ce-9638-84302ecea584 req-bd32ac50-ad65-41be-9fbf-124088aa4f41 service nova] Lock "21d74604-6a64-44ee-a012-ebff7166853e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.041251] env[61852]: DEBUG nova.compute.manager [req-a6c51530-0648-44ce-9638-84302ecea584 req-bd32ac50-ad65-41be-9fbf-124088aa4f41 service nova] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] No waiting events found dispatching network-vif-plugged-3d08b2a9-48bc-4f9f-bf97-a408e60853b0 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 848.041417] env[61852]: WARNING nova.compute.manager [req-a6c51530-0648-44ce-9638-84302ecea584 req-bd32ac50-ad65-41be-9fbf-124088aa4f41 service nova] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Received unexpected event network-vif-plugged-3d08b2a9-48bc-4f9f-bf97-a408e60853b0 for instance with vm_state building and task_state spawning. [ 848.087091] env[61852]: DEBUG nova.network.neutron [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Successfully updated port: 3d08b2a9-48bc-4f9f-bf97-a408e60853b0 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 848.146025] env[61852]: DEBUG oslo_vmware.api [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292958, 'name': CloneVM_Task} progress is 95%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.206818] env[61852]: DEBUG nova.network.neutron [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Successfully created port: 83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 848.234551] env[61852]: DEBUG oslo_vmware.api [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52185e7b-a89a-c5ae-8f2a-5c151a9c0913, 'name': SearchDatastore_Task, 'duration_secs': 0.012306} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.235469] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc44d07b-f3fb-44c5-b45a-39bebb6d0319 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.241077] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e54c30d-254d-4908-afca-73915f0c2184 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.245874] env[61852]: DEBUG oslo_vmware.api [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Waiting for the task: (returnval){ [ 848.245874] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52fb45af-f9f6-3c56-4d89-2414fc8d74b1" [ 848.245874] env[61852]: _type = "Task" [ 848.245874] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.253121] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5fe5648-c414-492d-88e0-167f11de8737 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.260781] env[61852]: DEBUG oslo_vmware.api [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52fb45af-f9f6-3c56-4d89-2414fc8d74b1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.287815] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-152ce4ef-ae9d-400b-80d3-7715fe935a6c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.296374] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a46cd28-7559-4775-be96-7e6ab1a1c314 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.309524] env[61852]: DEBUG nova.compute.provider_tree [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 848.325990] env[61852]: DEBUG nova.compute.manager [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 848.378843] env[61852]: INFO nova.compute.manager [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Took 36.05 seconds to build instance. [ 848.391293] env[61852]: DEBUG nova.compute.manager [req-0fc27fd8-3a27-4594-9e5a-99ab971d5578 req-d259d42a-dd4b-4d08-a8d5-22f978c579ad service nova] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Received event network-vif-deleted-17a779c7-0b48-479d-88e1-f5dc1ec4eab9 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 848.502714] env[61852]: DEBUG oslo_vmware.api [None req-fe4276cd-dc2b-4919-a432-bea3a809a392 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1292964, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.507437] env[61852]: DEBUG oslo_vmware.api [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292963, 'name': PowerOffVM_Task, 'duration_secs': 0.274698} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.508072] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 848.508278] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 848.508542] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a6b106da-928f-4414-8f01-4e50ccab73b2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.574621] env[61852]: DEBUG oslo_concurrency.lockutils [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "4b85f2d7-d99a-4332-a78c-3f2a50c7cb92" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.574942] env[61852]: DEBUG oslo_concurrency.lockutils [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "4b85f2d7-d99a-4332-a78c-3f2a50c7cb92" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.593023] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Acquiring lock "refresh_cache-21d74604-6a64-44ee-a012-ebff7166853e" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.593023] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Acquired lock "refresh_cache-21d74604-6a64-44ee-a012-ebff7166853e" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.593143] env[61852]: DEBUG nova.network.neutron [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 848.597697] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 
848.597817] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 848.597957] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Deleting the datastore file [datastore1] b0f8f7dd-e559-43be-b541-c3da48a07d68 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 848.598238] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c4b171b1-3f0c-4fd3-b7f8-2a4b9fcf1ae0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.605773] env[61852]: DEBUG oslo_vmware.api [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 848.605773] env[61852]: value = "task-1292966" [ 848.605773] env[61852]: _type = "Task" [ 848.605773] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.614268] env[61852]: DEBUG oslo_vmware.api [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292966, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.648695] env[61852]: DEBUG oslo_vmware.api [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1292958, 'name': CloneVM_Task, 'duration_secs': 1.824331} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.648975] env[61852]: INFO nova.virt.vmwareapi.vmops [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Created linked-clone VM from snapshot [ 848.649814] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-985ca4e0-e57e-48af-83e4-eb8c718735c9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.660144] env[61852]: DEBUG nova.virt.vmwareapi.images [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Uploading image f9405a8f-dc2e-4186-8a5b-95f60d4b7241 {{(pid=61852) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 848.686089] env[61852]: DEBUG oslo_vmware.rw_handles [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 848.686089] env[61852]: value = "vm-277362" [ 848.686089] env[61852]: _type = "VirtualMachine" [ 848.686089] env[61852]: }. {{(pid=61852) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 848.686413] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-71345ddb-1afc-413c-8a4c-2a9aa3a62ab2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.698140] env[61852]: DEBUG oslo_vmware.rw_handles [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Lease: (returnval){ [ 848.698140] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52c9d6a0-ae6b-7c77-a6f6-c2558722dee2" [ 848.698140] env[61852]: _type = "HttpNfcLease" [ 848.698140] env[61852]: } obtained for exporting VM: (result){ [ 848.698140] env[61852]: value = "vm-277362" [ 848.698140] env[61852]: _type = "VirtualMachine" [ 848.698140] env[61852]: }. {{(pid=61852) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 848.698425] env[61852]: DEBUG oslo_vmware.api [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the lease: (returnval){ [ 848.698425] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52c9d6a0-ae6b-7c77-a6f6-c2558722dee2" [ 848.698425] env[61852]: _type = "HttpNfcLease" [ 848.698425] env[61852]: } to be ready. {{(pid=61852) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 848.705485] env[61852]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 848.705485] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52c9d6a0-ae6b-7c77-a6f6-c2558722dee2" [ 848.705485] env[61852]: _type = "HttpNfcLease" [ 848.705485] env[61852]: } is initializing. 
{{(pid=61852) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 848.757144] env[61852]: DEBUG oslo_vmware.api [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52fb45af-f9f6-3c56-4d89-2414fc8d74b1, 'name': SearchDatastore_Task, 'duration_secs': 0.014733} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.757544] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.758158] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] aeaa2828-6d83-4b26-bd1c-5f654c70713f/aeaa2828-6d83-4b26-bd1c-5f654c70713f.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 848.759162] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c3005716-df9d-4ba3-a415-16df3faa68de {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.775266] env[61852]: DEBUG oslo_vmware.api [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Waiting for the task: (returnval){ [ 848.775266] env[61852]: value = "task-1292968" [ 848.775266] env[61852]: _type = "Task" [ 848.775266] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.784355] env[61852]: DEBUG oslo_vmware.api [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Task: {'id': task-1292968, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.813032] env[61852]: DEBUG nova.scheduler.client.report [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 848.854702] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquiring lock "988c0a5c-b84d-44cf-9068-defd7132b0c9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.854986] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lock "988c0a5c-b84d-44cf-9068-defd7132b0c9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.855264] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquiring lock "988c0a5c-b84d-44cf-9068-defd7132b0c9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.855514] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lock "988c0a5c-b84d-44cf-9068-defd7132b0c9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.855700] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lock "988c0a5c-b84d-44cf-9068-defd7132b0c9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.860854] env[61852]: INFO nova.compute.manager [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Terminating instance [ 848.865714] env[61852]: DEBUG nova.compute.manager [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 
tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 848.865927] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 848.867064] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-892d5ad5-6afd-4b16-82d4-aae4b0ec1fe8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.875133] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 848.875391] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4bf9dd4b-fd1d-4b11-b6ad-ae1ac419f5e0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.880787] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aaab17e3-7872-43f2-a8bb-aade220bd262 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lock "f48b40ab-23f2-4071-8168-e7e2411ad64d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.727s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.882149] env[61852]: DEBUG oslo_vmware.api [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Waiting for the task: (returnval){ [ 848.882149] env[61852]: value = "task-1292969" [ 848.882149] env[61852]: _type = "Task" [ 848.882149] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.890872] env[61852]: DEBUG oslo_vmware.api [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292969, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.975231] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquiring lock "f48b40ab-23f2-4071-8168-e7e2411ad64d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.975496] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lock "f48b40ab-23f2-4071-8168-e7e2411ad64d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.975767] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquiring lock "f48b40ab-23f2-4071-8168-e7e2411ad64d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.975999] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lock "f48b40ab-23f2-4071-8168-e7e2411ad64d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.976248] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lock "f48b40ab-23f2-4071-8168-e7e2411ad64d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.978633] env[61852]: INFO nova.compute.manager [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Terminating instance [ 848.980670] env[61852]: DEBUG nova.compute.manager [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 848.980875] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 848.981846] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6421d384-7518-4242-a2a5-12ab48065c51 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.989888] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 848.990207] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-68472f5b-e595-4d30-bc0d-484904674875 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.998943] env[61852]: DEBUG oslo_vmware.api [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Waiting for the task: (returnval){ [ 848.998943] env[61852]: value = "task-1292970" [ 848.998943] env[61852]: _type = "Task" [ 848.998943] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.006203] env[61852]: DEBUG oslo_vmware.api [None req-fe4276cd-dc2b-4919-a432-bea3a809a392 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1292964, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.010884] env[61852]: DEBUG oslo_vmware.api [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292970, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.077627] env[61852]: DEBUG nova.compute.manager [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 849.119698] env[61852]: DEBUG oslo_vmware.api [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1292966, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.287777} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.119698] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 849.119698] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 849.119698] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 849.120070] env[61852]: INFO nova.compute.manager [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Took 1.20 seconds to destroy the instance on the hypervisor. [ 849.120242] env[61852]: DEBUG oslo.service.loopingcall [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 849.120478] env[61852]: DEBUG nova.compute.manager [-] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 849.120574] env[61852]: DEBUG nova.network.neutron [-] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 849.128587] env[61852]: DEBUG nova.network.neutron [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 849.208782] env[61852]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 849.208782] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52c9d6a0-ae6b-7c77-a6f6-c2558722dee2" [ 849.208782] env[61852]: _type = "HttpNfcLease" [ 849.208782] env[61852]: } is ready. 
[ 849.209599] env[61852]: DEBUG oslo_vmware.rw_handles [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 849.209599] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52c9d6a0-ae6b-7c77-a6f6-c2558722dee2" [ 849.209599] env[61852]: _type = "HttpNfcLease" [ 849.209599] env[61852]: }. {{(pid=61852) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 849.210127] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c8a0cd1-b9d8-489d-aab8-6d0b6cac7d85 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.221310] env[61852]: DEBUG oslo_vmware.rw_handles [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523a2f0e-2c8a-9ba6-d8bd-3a8ae9aa3db2/disk-0.vmdk from lease info. {{(pid=61852) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 849.221569] env[61852]: DEBUG oslo_vmware.rw_handles [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523a2f0e-2c8a-9ba6-d8bd-3a8ae9aa3db2/disk-0.vmdk for reading. {{(pid=61852) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 849.292570] env[61852]: DEBUG oslo_vmware.api [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Task: {'id': task-1292968, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
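
The HttpNfcLease records above are the image-export path: once the lease is ready, its info lists per-disk URLs and the .vmdk one is opened for reading. Roughly, and with error handling and lease-progress updates elided:

    from oslo_vmware import vim_util

    def find_vmdk_url(session, lease):
        # "Invoking VIM API for reading info of lease" in the log
        info = session.invoke_api(vim_util, 'get_object_property',
                                  session.vim, lease, 'info')
        for device_url in info.deviceUrl:
            if device_url.disk and device_url.url.endswith('.vmdk'):
                # "Found VMDK URL: https://esx.../nfc/.../disk-0.vmdk"
                return device_url.url
        raise LookupError('lease info carries no VMDK device URL')
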
[ 849.317773] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.005s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.320660] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.989s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.322662] env[61852]: INFO nova.compute.claims [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 849.328933] env[61852]: DEBUG nova.network.neutron [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Updating instance_info_cache with network_info: [{"id": "3d08b2a9-48bc-4f9f-bf97-a408e60853b0", "address": "fa:16:3e:d7:aa:23", "network": {"id": "eed7489b-d5d5-4cbe-b602-9d2203abb4c8", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1239226515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ab258d1a24f3459d95421bcb84287f85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d08b2a9-48", "ovs_interfaceid": "3d08b2a9-48bc-4f9f-bf97-a408e60853b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.330727] env[61852]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-37c321dc-2205-4462-a766-3b759fdc088f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.335517] env[61852]: DEBUG nova.compute.manager [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
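
The "Lock "compute_resources" acquired ... waited 15.989s" / "held 2.005s" bookkeeping comes from oslo.concurrency: the resource tracker serializes all claims behind one process-local lock, and the waited/held figures bracket exactly the decorated critical section. In sketch form (the decorator name matches the library; the function body is illustrative):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(context, instance, node):
        # Runs with the per-process "compute_resources" lock held; the
        # "waited N s" figure in the log is time spent queued right here.
        ...
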
[ 849.352939] env[61852]: INFO nova.scheduler.client.report [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Deleted allocations for instance e265a4be-7b37-40b5-a199-42a7cd945f66 [ 849.374125] env[61852]: DEBUG nova.virt.hardware [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=<?>,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-15T17:18:24Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 849.374455] env[61852]: DEBUG nova.virt.hardware [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 849.374631] env[61852]: DEBUG nova.virt.hardware [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 849.374830] env[61852]: DEBUG nova.virt.hardware [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 849.374978] env[61852]: DEBUG nova.virt.hardware [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 849.375148] env[61852]: DEBUG nova.virt.hardware [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 849.375365] env[61852]: DEBUG nova.virt.hardware [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 849.375530] env[61852]: DEBUG nova.virt.hardware [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47
tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 849.375705] env[61852]: DEBUG nova.virt.hardware [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 849.375873] env[61852]: DEBUG nova.virt.hardware [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 849.376064] env[61852]: DEBUG nova.virt.hardware [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 849.378292] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36724426-e35c-4472-b217-53568a82f57f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.394826] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7e370bd-f6f6-4460-b539-612c2f89a26e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.403555] env[61852]: DEBUG oslo_vmware.api [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292969, 'name': PowerOffVM_Task, 'duration_secs': 0.231967} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
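
The topology walk just traced (limits 65536:65536:65536, a single 1:1:1 candidate for one vCPU) enumerates socket/core/thread splits whose product equals the vCPU count. A simplified version of that enumeration, keeping only the counting logic (nova.virt.hardware also applies preferences and sort ordering, omitted here):

    def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536,
                                max_threads=65536):
        # Yield (sockets, cores, threads) triples with s*c*t == vcpus,
        # capped by the flavor/image limits logged above.
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                if vcpus % (s * c):
                    continue
                t = vcpus // (s * c)
                if t <= max_threads:
                    yield (s, c, t)

    print(list(possible_cpu_topologies(1)))   # [(1, 1, 1)] -- as logged
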
[ 849.404316] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 849.404486] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 849.404755] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f2ec5bfb-7d36-449c-a53d-5ce44f020de4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.477307] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 849.477307] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Deleting contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 849.477307] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Deleting the datastore file [datastore2] 988c0a5c-b84d-44cf-9068-defd7132b0c9 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 849.477531] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c479fa05-efd2-499e-a6d2-7f2e4595b0ba {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.484792] env[61852]: DEBUG oslo_vmware.api [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Waiting for the task: (returnval){ [ 849.484792] env[61852]: value = "task-1292972" [ 849.484792] env[61852]: _type = "Task" [ 849.484792] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.492700] env[61852]: DEBUG oslo_vmware.api [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292972, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.506858] env[61852]: DEBUG oslo_vmware.api [None req-fe4276cd-dc2b-4919-a432-bea3a809a392 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1292964, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
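
Instance 988c0a5c... has now been walked through the full teardown: power off, unregister, delete the backing datastore directory, each vCenter task awaited before the next step. A condensed sketch of that ordering, with datacenter-ref lookup and fault handling elided:

    def destroy_vm(session, vm_ref, dc_ref, ds_path):
        # PowerOffVM_Task -> "Powered off the VM"
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)
        # UnregisterVM is a plain call, not a task -> "Unregistered the VM"
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
        # DeleteDatastoreFile_Task on e.g. "[datastore2] 988c0a5c-..."
        file_mgr = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_mgr, name=ds_path, datacenter=dc_ref)
        session.wait_for_task(task)
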
[ 849.511996] env[61852]: DEBUG oslo_vmware.api [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292970, 'name': PowerOffVM_Task, 'duration_secs': 0.443238} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.512296] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 849.512467] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 849.512759] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c249cc8e-b7c8-47d5-bf89-6f3382582cab {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.586559] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 849.586906] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 849.587214] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Deleting the datastore file [datastore1] f48b40ab-23f2-4071-8168-e7e2411ad64d {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 849.590578] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-06619bf2-62b5-4049-8429-ce6ddcbd1bf8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.599020] env[61852]: DEBUG oslo_vmware.api [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Waiting for the task: (returnval){ [ 849.599020] env[61852]: value = "task-1292974" [ 849.599020] env[61852]: _type = "Task" [ 849.599020] env[61852]: } to complete.
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.603267] env[61852]: DEBUG oslo_concurrency.lockutils [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.607475] env[61852]: DEBUG oslo_vmware.api [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292974, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.757452] env[61852]: DEBUG nova.network.neutron [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Successfully updated port: 83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 849.795907] env[61852]: DEBUG oslo_vmware.api [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Task: {'id': task-1292968, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.602753} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.796340] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] aeaa2828-6d83-4b26-bd1c-5f654c70713f/aeaa2828-6d83-4b26-bd1c-5f654c70713f.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 849.796589] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 849.797430] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5530e7f6-2070-4e10-9d74-8de07297e5d2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.806767] env[61852]: DEBUG oslo_vmware.api [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Waiting for the task: (returnval){ [ 849.806767] env[61852]: value = "task-1292975" [ 849.806767] env[61852]: _type = "Task" [ 849.806767] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.816359] env[61852]: DEBUG oslo_vmware.api [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Task: {'id': task-1292975, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.835164] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Releasing lock "refresh_cache-21d74604-6a64-44ee-a012-ebff7166853e" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.835740] env[61852]: DEBUG nova.compute.manager [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Instance network_info: |[{"id": "3d08b2a9-48bc-4f9f-bf97-a408e60853b0", "address": "fa:16:3e:d7:aa:23", "network": {"id": "eed7489b-d5d5-4cbe-b602-9d2203abb4c8", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1239226515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ab258d1a24f3459d95421bcb84287f85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d08b2a9-48", "ovs_interfaceid": "3d08b2a9-48bc-4f9f-bf97-a408e60853b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 849.836295] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:aa:23', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a10c88d7-d13f-44fd-acee-7a734eb5f56a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3d08b2a9-48bc-4f9f-bf97-a408e60853b0', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 849.845705] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Creating folder: Project (ab258d1a24f3459d95421bcb84287f85). Parent ref: group-v277280. 
{{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 849.846118] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-61a92bfd-e263-43d6-ab30-8774cb20d0e4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.860601] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a6e790dd-9ab0-4b11-81bd-69dbffb3d4bc tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Lock "e265a4be-7b37-40b5-a199-42a7cd945f66" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 21.656s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.863886] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Created folder: Project (ab258d1a24f3459d95421bcb84287f85) in parent group-v277280. [ 849.863886] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Creating folder: Instances. Parent ref: group-v277364. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 849.865446] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1e351bb5-c09b-43fe-8827-2303761f312c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.876103] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Created folder: Instances in parent group-v277364. [ 849.876990] env[61852]: DEBUG oslo.service.loopingcall [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 849.877266] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 849.877673] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-83497d29-dff4-44f7-ab10-64e221a1977b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.900929] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 849.900929] env[61852]: value = "task-1292978" [ 849.900929] env[61852]: _type = "Task" [ 849.900929] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.909304] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292978, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
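
The "Creating folder: Project (...)" then "Creating folder: Instances" pairs are a nested, idempotent folder setup: a concurrent creator is treated as success. A sketch of that shape, assuming oslo_vmware exposes the vCenter DuplicateName fault as an exception class (the existing-folder lookup is elided):

    from oslo_vmware import exceptions as vexc

    def create_folder(session, parent_ref, name):
        try:
            # Folder.CreateFolder, as invoked twice in the records above
            return session.invoke_api(session.vim, 'CreateFolder',
                                      parent_ref, name=name)
        except vexc.DuplicateName:
            # Another request created it first; fall back to looking up
            # the existing child folder under parent_ref instead.
            ...
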
[ 849.919355] env[61852]: DEBUG nova.network.neutron [-] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.000023] env[61852]: DEBUG oslo_vmware.api [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292972, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.194266} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.002509] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 850.002828] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Deleted contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 850.003075] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 850.003309] env[61852]: INFO nova.compute.manager [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Took 1.14 seconds to destroy the instance on the hypervisor. [ 850.003604] env[61852]: DEBUG oslo.service.loopingcall [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 850.004772] env[61852]: DEBUG nova.compute.manager [-] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 850.004772] env[61852]: DEBUG nova.network.neutron [-] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 850.014338] env[61852]: DEBUG oslo_vmware.api [None req-fe4276cd-dc2b-4919-a432-bea3a809a392 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1292964, 'name': ReconfigVM_Task} progress is 14%.
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.073393] env[61852]: DEBUG nova.compute.manager [req-fe2e3f2f-88d9-4dae-b9e3-4015943212f3 req-33785feb-0caf-46b1-9cb1-35bdaea5e60f service nova] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Received event network-changed-3d08b2a9-48bc-4f9f-bf97-a408e60853b0 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 850.074312] env[61852]: DEBUG nova.compute.manager [req-fe2e3f2f-88d9-4dae-b9e3-4015943212f3 req-33785feb-0caf-46b1-9cb1-35bdaea5e60f service nova] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Refreshing instance network info cache due to event network-changed-3d08b2a9-48bc-4f9f-bf97-a408e60853b0. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 850.074560] env[61852]: DEBUG oslo_concurrency.lockutils [req-fe2e3f2f-88d9-4dae-b9e3-4015943212f3 req-33785feb-0caf-46b1-9cb1-35bdaea5e60f service nova] Acquiring lock "refresh_cache-21d74604-6a64-44ee-a012-ebff7166853e" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 850.074715] env[61852]: DEBUG oslo_concurrency.lockutils [req-fe2e3f2f-88d9-4dae-b9e3-4015943212f3 req-33785feb-0caf-46b1-9cb1-35bdaea5e60f service nova] Acquired lock "refresh_cache-21d74604-6a64-44ee-a012-ebff7166853e" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.074914] env[61852]: DEBUG nova.network.neutron [req-fe2e3f2f-88d9-4dae-b9e3-4015943212f3 req-33785feb-0caf-46b1-9cb1-35bdaea5e60f service nova] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Refreshing network info cache for port 3d08b2a9-48bc-4f9f-bf97-a408e60853b0 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 850.107658] env[61852]: DEBUG oslo_vmware.api [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Task: {'id': task-1292974, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142628} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.109648] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 850.109648] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 850.109648] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 850.109648] env[61852]: INFO nova.compute.manager [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Took 1.13 seconds to destroy the instance on the hypervisor. [ 850.109648] env[61852]: DEBUG oslo.service.loopingcall [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 850.110056] env[61852]: DEBUG nova.compute.manager [-] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 850.110056] env[61852]: DEBUG nova.network.neutron [-] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 850.261911] env[61852]: DEBUG oslo_concurrency.lockutils [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquiring lock "refresh_cache-8d8679db-eb9d-45c1-b053-70378f58e273" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 850.261911] env[61852]: DEBUG oslo_concurrency.lockutils [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquired lock "refresh_cache-8d8679db-eb9d-45c1-b053-70378f58e273" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.261911] env[61852]: DEBUG nova.network.neutron [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 850.318494] env[61852]: DEBUG oslo_vmware.api [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Task: {'id': task-1292975, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068897} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.318846] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 850.320360] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f230a9a-2d0c-44b8-9c27-90aa771201fd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.351349] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] aeaa2828-6d83-4b26-bd1c-5f654c70713f/aeaa2828-6d83-4b26-bd1c-5f654c70713f.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 850.353287] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aa8a7441-9b3c-4886-8820-0a6e6ea9d502 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.376471] env[61852]: DEBUG oslo_vmware.api [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Waiting for the task: (returnval){ [ 850.376471] env[61852]: value = "task-1292979" [ 850.376471] env[61852]: _type = "Task" [ 850.376471] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.385325] env[61852]: DEBUG oslo_vmware.api [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Task: {'id': task-1292979, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.419019] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292978, 'name': CreateVM_Task, 'duration_secs': 0.418554} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.419217] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 850.422020] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 850.422020] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.422020] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 850.422020] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1832146e-1af9-4efb-9e89-a69460146dfd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.427245] env[61852]: INFO nova.compute.manager [-] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Took 1.31 seconds to deallocate network for instance. [ 850.436931] env[61852]: DEBUG oslo_vmware.api [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Waiting for the task: (returnval){ [ 850.436931] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52293815-0958-718b-6253-7f3a2a866d22" [ 850.436931] env[61852]: _type = "Task" [ 850.436931] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.455237] env[61852]: DEBUG oslo_vmware.api [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52293815-0958-718b-6253-7f3a2a866d22, 'name': SearchDatastore_Task, 'duration_secs': 0.010447} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.458715] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 850.459180] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 850.459465] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 850.459679] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.459910] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 850.462084] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fe838eef-270a-403a-9bf3-dd3dbed6adc8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.471808] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 850.471976] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
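
The lock/SearchDatastore_Task sequence around devstack-image-cache_base is the image-cache fast path: probe for the cached VMDK under a lock, download only on a miss, then copy cache to instance directory. Condensed, with the datastore helpers left hypothetical:

    from oslo_concurrency import lockutils

    def fetch_image_if_missing(session, cache_vmdk, instance_vmdk):
        with lockutils.lock(cache_vmdk):
            # SearchDatastore_Task probe, as in the records above
            if not datastore_file_exists(session, cache_vmdk):    # hypothetical
                download_image_to_cache(session, cache_vmdk)      # hypothetical
        # CopyVirtualDisk_Task: cache copy -> <uuid>/<uuid>.vmdk
        copy_virtual_disk(session, cache_vmdk, instance_vmdk)     # hypothetical
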
[ 850.473219] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8da1eb6e-93e1-454e-9551-b47e6655f851 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.484246] env[61852]: DEBUG oslo_vmware.api [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Waiting for the task: (returnval){ [ 850.484246] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5255ade3-3559-74f6-0f74-41dbdbf78bf4" [ 850.484246] env[61852]: _type = "Task" [ 850.484246] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.494517] env[61852]: DEBUG oslo_vmware.api [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5255ade3-3559-74f6-0f74-41dbdbf78bf4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.504806] env[61852]: DEBUG nova.compute.manager [req-8db6f3b6-b7be-4519-9699-e87b586e77f9 req-2c394405-d245-4531-ad2d-b35eee2ec63f service nova] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Received event network-vif-deleted-d6bff739-5602-402b-8bb2-eb9bb4ab0bd7 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 850.505166] env[61852]: DEBUG nova.compute.manager [req-8db6f3b6-b7be-4519-9699-e87b586e77f9 req-2c394405-d245-4531-ad2d-b35eee2ec63f service nova] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Received event network-vif-plugged-83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 850.505406] env[61852]: DEBUG oslo_concurrency.lockutils [req-8db6f3b6-b7be-4519-9699-e87b586e77f9 req-2c394405-d245-4531-ad2d-b35eee2ec63f service nova] Acquiring lock "8d8679db-eb9d-45c1-b053-70378f58e273-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.505680] env[61852]: DEBUG oslo_concurrency.lockutils [req-8db6f3b6-b7be-4519-9699-e87b586e77f9 req-2c394405-d245-4531-ad2d-b35eee2ec63f service nova] Lock "8d8679db-eb9d-45c1-b053-70378f58e273-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.505909] env[61852]: DEBUG oslo_concurrency.lockutils [req-8db6f3b6-b7be-4519-9699-e87b586e77f9 req-2c394405-d245-4531-ad2d-b35eee2ec63f service nova] Lock "8d8679db-eb9d-45c1-b053-70378f58e273-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.506149] env[61852]: DEBUG nova.compute.manager [req-8db6f3b6-b7be-4519-9699-e87b586e77f9 req-2c394405-d245-4531-ad2d-b35eee2ec63f service nova] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] No waiting events found dispatching network-vif-plugged-83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
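
The "No waiting events found dispatching network-vif-plugged-..." line is the external-event handshake firing before anyone registered for it: normally the driver arms a waiter before plugging the VIF, and the Neutron notification pops it. A stripped-down sketch of that registry (per-instance locking and timeouts, as traced in the lockutils records above, are omitted):

    import threading

    _waiters = {}   # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(instance_uuid, event_name):
        ev = threading.Event()
        _waiters[(instance_uuid, event_name)] = ev
        return ev    # caller blocks in ev.wait(timeout) while plugging the VIF

    def pop_instance_event(instance_uuid, event_name):
        ev = _waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            # nothing armed -> the WARNING "unexpected event" path in the log
            return False
        ev.set()     # wake the waiter
        return True
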
[ 850.506300] env[61852]: WARNING nova.compute.manager [req-8db6f3b6-b7be-4519-9699-e87b586e77f9 req-2c394405-d245-4531-ad2d-b35eee2ec63f service nova] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Received unexpected event network-vif-plugged-83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b for instance with vm_state building and task_state spawning. [ 850.506455] env[61852]: DEBUG nova.compute.manager [req-8db6f3b6-b7be-4519-9699-e87b586e77f9 req-2c394405-d245-4531-ad2d-b35eee2ec63f service nova] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Received event network-changed-83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 850.506618] env[61852]: DEBUG nova.compute.manager [req-8db6f3b6-b7be-4519-9699-e87b586e77f9 req-2c394405-d245-4531-ad2d-b35eee2ec63f service nova] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Refreshing instance network info cache due to event network-changed-83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 850.507454] env[61852]: DEBUG oslo_concurrency.lockutils [req-8db6f3b6-b7be-4519-9699-e87b586e77f9 req-2c394405-d245-4531-ad2d-b35eee2ec63f service nova] Acquiring lock "refresh_cache-8d8679db-eb9d-45c1-b053-70378f58e273" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 850.516973] env[61852]: DEBUG oslo_vmware.api [None req-fe4276cd-dc2b-4919-a432-bea3a809a392 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1292964, 'name': ReconfigVM_Task} progress is 14%.
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.699676] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e340c26-1c93-49e1-bf5a-cb240548c259 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.707981] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-337378d1-05ac-47c0-bafc-3d389df0abc4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.742252] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58bf2a66-5709-4f4f-8507-f503b699fe55 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.752473] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdd44b54-e7a2-4cc4-8762-2ac6bde1ff25 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.769665] env[61852]: DEBUG nova.compute.provider_tree [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 850.888050] env[61852]: DEBUG oslo_vmware.api [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Task: {'id': task-1292979, 'name': ReconfigVM_Task, 'duration_secs': 0.404393} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.888524] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Reconfigured VM instance instance-00000047 to attach disk [datastore1] aeaa2828-6d83-4b26-bd1c-5f654c70713f/aeaa2828-6d83-4b26-bd1c-5f654c70713f.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 850.889319] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b0abe209-16ba-490e-aeb3-3e9e8fc37a0a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.896609] env[61852]: DEBUG oslo_vmware.api [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Waiting for the task: (returnval){ [ 850.896609] env[61852]: value = "task-1292980" [ 850.896609] env[61852]: _type = "Task" [ 850.896609] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.907571] env[61852]: DEBUG oslo_vmware.api [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Task: {'id': task-1292980, 'name': Rename_Task} progress is 5%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.934564] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.956194] env[61852]: DEBUG nova.network.neutron [-] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.964979] env[61852]: DEBUG nova.network.neutron [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 850.998553] env[61852]: DEBUG oslo_vmware.api [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5255ade3-3559-74f6-0f74-41dbdbf78bf4, 'name': SearchDatastore_Task, 'duration_secs': 0.01024} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.003992] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b043804b-f24d-4ff9-a07d-57194959abc2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.012497] env[61852]: DEBUG nova.network.neutron [-] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.014211] env[61852]: DEBUG oslo_vmware.api [None req-fe4276cd-dc2b-4919-a432-bea3a809a392 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1292964, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.018123] env[61852]: DEBUG oslo_vmware.api [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Waiting for the task: (returnval){ [ 851.018123] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52029fef-5343-1323-8bf0-e3d3596a6bca" [ 851.018123] env[61852]: _type = "Task" [ 851.018123] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.037516] env[61852]: DEBUG oslo_vmware.api [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52029fef-5343-1323-8bf0-e3d3596a6bca, 'name': SearchDatastore_Task, 'duration_secs': 0.017693} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.037906] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.038243] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] 21d74604-6a64-44ee-a012-ebff7166853e/21d74604-6a64-44ee-a012-ebff7166853e.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 851.038599] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6e8cde4c-94cb-4437-918e-64abb332759b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.046886] env[61852]: DEBUG oslo_vmware.api [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Waiting for the task: (returnval){ [ 851.046886] env[61852]: value = "task-1292981" [ 851.046886] env[61852]: _type = "Task" [ 851.046886] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.062139] env[61852]: DEBUG oslo_vmware.api [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Task: {'id': task-1292981, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.164500] env[61852]: DEBUG nova.network.neutron [req-fe2e3f2f-88d9-4dae-b9e3-4015943212f3 req-33785feb-0caf-46b1-9cb1-35bdaea5e60f service nova] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Updated VIF entry in instance network info cache for port 3d08b2a9-48bc-4f9f-bf97-a408e60853b0. 
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 851.164905] env[61852]: DEBUG nova.network.neutron [req-fe2e3f2f-88d9-4dae-b9e3-4015943212f3 req-33785feb-0caf-46b1-9cb1-35bdaea5e60f service nova] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Updating instance_info_cache with network_info: [{"id": "3d08b2a9-48bc-4f9f-bf97-a408e60853b0", "address": "fa:16:3e:d7:aa:23", "network": {"id": "eed7489b-d5d5-4cbe-b602-9d2203abb4c8", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1239226515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ab258d1a24f3459d95421bcb84287f85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d08b2a9-48", "ovs_interfaceid": "3d08b2a9-48bc-4f9f-bf97-a408e60853b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.260239] env[61852]: DEBUG nova.network.neutron [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Updating instance_info_cache with network_info: [{"id": "83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b", "address": "fa:16:3e:82:81:71", "network": {"id": "d8dfb48f-1d4c-40ca-a2c0-27b808516657", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-603860889-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fdd2d4aeb954b6fae049090b32f657b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d48f0ef6-34e5-44d4-8baf-4470ed96ce73", "external-id": "nsx-vlan-transportzone-316", "segmentation_id": 316, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83bdd4e5-89", "ovs_interfaceid": "83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.275836] env[61852]: DEBUG nova.scheduler.client.report [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 851.409670] env[61852]: DEBUG oslo_vmware.api [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Task: {'id': task-1292980, 'name': Rename_Task, 'duration_secs': 0.16756} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.410020] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 851.410303] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-354c220c-64ce-4951-bc60-96cea6e995b3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.419998] env[61852]: DEBUG oslo_vmware.api [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Waiting for the task: (returnval){ [ 851.419998] env[61852]: value = "task-1292982" [ 851.419998] env[61852]: _type = "Task" [ 851.419998] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.430545] env[61852]: DEBUG oslo_vmware.api [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Task: {'id': task-1292982, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.461336] env[61852]: INFO nova.compute.manager [-] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Took 1.46 seconds to deallocate network for instance. [ 851.511497] env[61852]: DEBUG oslo_vmware.api [None req-fe4276cd-dc2b-4919-a432-bea3a809a392 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1292964, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.519541] env[61852]: INFO nova.compute.manager [-] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Took 1.41 seconds to deallocate network for instance. [ 851.561605] env[61852]: DEBUG oslo_vmware.api [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Task: {'id': task-1292981, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.667987] env[61852]: DEBUG oslo_concurrency.lockutils [req-fe2e3f2f-88d9-4dae-b9e3-4015943212f3 req-33785feb-0caf-46b1-9cb1-35bdaea5e60f service nova] Releasing lock "refresh_cache-21d74604-6a64-44ee-a012-ebff7166853e" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.763854] env[61852]: DEBUG oslo_concurrency.lockutils [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Releasing lock "refresh_cache-8d8679db-eb9d-45c1-b053-70378f58e273" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.764301] env[61852]: DEBUG nova.compute.manager [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Instance network_info: |[{"id": "83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b", "address": "fa:16:3e:82:81:71", "network": {"id": "d8dfb48f-1d4c-40ca-a2c0-27b808516657", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-603860889-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fdd2d4aeb954b6fae049090b32f657b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d48f0ef6-34e5-44d4-8baf-4470ed96ce73", "external-id": "nsx-vlan-transportzone-316", "segmentation_id": 316, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83bdd4e5-89", "ovs_interfaceid": "83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 851.764733] env[61852]: DEBUG oslo_concurrency.lockutils [req-8db6f3b6-b7be-4519-9699-e87b586e77f9 req-2c394405-d245-4531-ad2d-b35eee2ec63f service nova] Acquired lock "refresh_cache-8d8679db-eb9d-45c1-b053-70378f58e273" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.764984] env[61852]: DEBUG nova.network.neutron [req-8db6f3b6-b7be-4519-9699-e87b586e77f9 req-2c394405-d245-4531-ad2d-b35eee2ec63f service nova] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Refreshing network info cache for port 83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 851.767793] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:81:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd48f0ef6-34e5-44d4-8baf-4470ed96ce73', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 851.777090] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Creating folder: Project (1fdd2d4aeb954b6fae049090b32f657b). Parent ref: group-v277280. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 851.778913] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8ea71c88-d588-4c84-bba4-0c1650d2c3ee {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.782181] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.462s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.782846] env[61852]: DEBUG nova.compute.manager [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 851.789248] env[61852]: DEBUG oslo_concurrency.lockutils [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.359s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.792140] env[61852]: INFO nova.compute.claims [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 851.804461] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Created folder: Project (1fdd2d4aeb954b6fae049090b32f657b) in parent group-v277280. [ 851.804679] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Creating folder: Instances. Parent ref: group-v277367. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 851.805120] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0aa52455-9762-4ab4-81d4-74949a4468d4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.815253] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Created folder: Instances in parent group-v277367. 
[ 851.815555] env[61852]: DEBUG oslo.service.loopingcall [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 851.815760] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 851.815997] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f0500669-dc2f-4a4a-9f98-bde410c9181e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.837832] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 851.837832] env[61852]: value = "task-1292985" [ 851.837832] env[61852]: _type = "Task" [ 851.837832] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.846596] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292985, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.930982] env[61852]: DEBUG oslo_vmware.api [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Task: {'id': task-1292982, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.969127] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.010136] env[61852]: DEBUG oslo_vmware.api [None req-fe4276cd-dc2b-4919-a432-bea3a809a392 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1292964, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.026721] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.063088] env[61852]: DEBUG oslo_vmware.api [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Task: {'id': task-1292981, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.583838} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.063597] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] 21d74604-6a64-44ee-a012-ebff7166853e/21d74604-6a64-44ee-a012-ebff7166853e.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 852.063975] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 852.064410] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1906bd45-eceb-42ae-8fe1-7ca933722607 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.073526] env[61852]: DEBUG oslo_vmware.api [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Waiting for the task: (returnval){ [ 852.073526] env[61852]: value = "task-1292986" [ 852.073526] env[61852]: _type = "Task" [ 852.073526] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.087226] env[61852]: DEBUG oslo_vmware.api [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Task: {'id': task-1292986, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.289987] env[61852]: DEBUG nova.compute.utils [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 852.291579] env[61852]: DEBUG nova.compute.manager [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 852.291759] env[61852]: DEBUG nova.network.neutron [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 852.348645] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1292985, 'name': CreateVM_Task, 'duration_secs': 0.356529} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.348823] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 852.349566] env[61852]: DEBUG oslo_concurrency.lockutils [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 852.349717] env[61852]: DEBUG oslo_concurrency.lockutils [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.350048] env[61852]: DEBUG oslo_concurrency.lockutils [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 852.350318] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68be8b97-036a-4d97-8960-13df7db176a5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.354955] env[61852]: DEBUG oslo_vmware.api [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){ [ 852.354955] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5251bb20-699a-2272-cc39-319a74cbebc0" [ 852.354955] env[61852]: _type = "Task" [ 852.354955] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.362731] env[61852]: DEBUG oslo_vmware.api [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5251bb20-699a-2272-cc39-319a74cbebc0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.364199] env[61852]: DEBUG nova.policy [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5db98c1126cc41b5930b2e5fa823c330', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '783bc6968c91488293479f10b8dc92c1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 852.433610] env[61852]: DEBUG oslo_vmware.api [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Task: {'id': task-1292982, 'name': PowerOnVM_Task, 'duration_secs': 0.530349} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.433999] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 852.434217] env[61852]: INFO nova.compute.manager [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Took 8.03 seconds to spawn the instance on the hypervisor. [ 852.434450] env[61852]: DEBUG nova.compute.manager [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 852.435283] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c48d4a0b-f045-427b-8f83-4e2526ea4969 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.510435] env[61852]: DEBUG oslo_vmware.api [None req-fe4276cd-dc2b-4919-a432-bea3a809a392 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1292964, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.584394] env[61852]: DEBUG oslo_vmware.api [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Task: {'id': task-1292986, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.104878} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.584760] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 852.585551] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d219dba-8185-4608-86a0-4017813a665f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.610327] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] 21d74604-6a64-44ee-a012-ebff7166853e/21d74604-6a64-44ee-a012-ebff7166853e.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 852.611471] env[61852]: DEBUG nova.network.neutron [req-8db6f3b6-b7be-4519-9699-e87b586e77f9 req-2c394405-d245-4531-ad2d-b35eee2ec63f service nova] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Updated VIF entry in instance network info cache for port 83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 852.611874] env[61852]: DEBUG nova.network.neutron [req-8db6f3b6-b7be-4519-9699-e87b586e77f9 req-2c394405-d245-4531-ad2d-b35eee2ec63f service nova] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Updating instance_info_cache with network_info: [{"id": "83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b", "address": "fa:16:3e:82:81:71", "network": {"id": "d8dfb48f-1d4c-40ca-a2c0-27b808516657", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-603860889-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fdd2d4aeb954b6fae049090b32f657b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d48f0ef6-34e5-44d4-8baf-4470ed96ce73", "external-id": "nsx-vlan-transportzone-316", "segmentation_id": 316, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83bdd4e5-89", "ovs_interfaceid": "83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.613551] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9d1e4811-88c6-4645-b827-8fe314194072 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.643817] env[61852]: DEBUG oslo_vmware.api [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 
tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Waiting for the task: (returnval){ [ 852.643817] env[61852]: value = "task-1292987" [ 852.643817] env[61852]: _type = "Task" [ 852.643817] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.654676] env[61852]: DEBUG oslo_vmware.api [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Task: {'id': task-1292987, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.746565] env[61852]: DEBUG nova.network.neutron [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Successfully created port: 40eb747f-021a-4082-9f8d-70a6af6a415e {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 852.795859] env[61852]: DEBUG nova.compute.manager [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 852.867816] env[61852]: DEBUG oslo_vmware.api [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5251bb20-699a-2272-cc39-319a74cbebc0, 'name': SearchDatastore_Task, 'duration_secs': 0.02503} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.868601] env[61852]: DEBUG oslo_concurrency.lockutils [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 852.868864] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 852.869133] env[61852]: DEBUG oslo_concurrency.lockutils [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 852.869291] env[61852]: DEBUG oslo_concurrency.lockutils [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.869494] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 852.869778] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e88aea4a-d040-4f43-9023-547c1f5901af {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.879479] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 852.879724] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 852.883052] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7a7c950-9431-452b-ab24-04ebc07b2a3e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.890018] env[61852]: DEBUG oslo_vmware.api [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){ [ 852.890018] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]520380f8-a648-519b-9fe0-d8089c6d99de" [ 852.890018] env[61852]: _type = "Task" [ 852.890018] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.905922] env[61852]: DEBUG oslo_vmware.api [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]520380f8-a648-519b-9fe0-d8089c6d99de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.956781] env[61852]: INFO nova.compute.manager [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Took 27.01 seconds to build instance. [ 853.009646] env[61852]: DEBUG oslo_vmware.api [None req-fe4276cd-dc2b-4919-a432-bea3a809a392 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1292964, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.024356] env[61852]: DEBUG oslo_vmware.rw_handles [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fdb36d-ca46-2af2-fde1-79440cd967d6/disk-0.vmdk. {{(pid=61852) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 853.025316] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-484c6c79-9bac-4f65-ae24-04d0d3128648 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.037951] env[61852]: DEBUG oslo_vmware.rw_handles [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fdb36d-ca46-2af2-fde1-79440cd967d6/disk-0.vmdk is in state: ready. {{(pid=61852) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 853.037951] env[61852]: ERROR oslo_vmware.rw_handles [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fdb36d-ca46-2af2-fde1-79440cd967d6/disk-0.vmdk due to incomplete transfer. 
[ 853.037951] env[61852]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e853fa24-3705-426c-a34f-da36ca98d1c7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.044269] env[61852]: DEBUG oslo_vmware.rw_handles [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fdb36d-ca46-2af2-fde1-79440cd967d6/disk-0.vmdk. {{(pid=61852) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 853.045354] env[61852]: DEBUG nova.virt.vmwareapi.images [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Uploaded image 8eadd208-fbd0-4fde-9723-395ea516a40e to the Glance image server {{(pid=61852) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 853.047522] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Destroying the VM {{(pid=61852) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 853.050495] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-58ae6198-2cad-46de-9707-74337356e288 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.056609] env[61852]: DEBUG oslo_vmware.api [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 853.056609] env[61852]: value = "task-1292988" [ 853.056609] env[61852]: _type = "Task" [ 853.056609] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.065152] env[61852]: DEBUG oslo_vmware.api [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1292988, 'name': Destroy_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.135377] env[61852]: DEBUG oslo_concurrency.lockutils [req-8db6f3b6-b7be-4519-9699-e87b586e77f9 req-2c394405-d245-4531-ad2d-b35eee2ec63f service nova] Releasing lock "refresh_cache-8d8679db-eb9d-45c1-b053-70378f58e273" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.141438] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdb5fbd2-82ff-40b2-915a-513dbf6287f0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.158127] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ea49e2e-acf0-46ea-9c48-5044e3a4034d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.160040] env[61852]: DEBUG oslo_vmware.api [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Task: {'id': task-1292987, 'name': ReconfigVM_Task, 'duration_secs': 0.384944} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.160350] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Reconfigured VM instance instance-00000048 to attach disk [datastore2] 21d74604-6a64-44ee-a012-ebff7166853e/21d74604-6a64-44ee-a012-ebff7166853e.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 853.161394] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-744836df-6f0a-4beb-99c4-b5ac6dabe1f4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.195219] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d115a125-a96f-4b2e-9abb-7fcc6c13c0b2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.199705] env[61852]: DEBUG oslo_vmware.api [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Waiting for the task: (returnval){ [ 853.199705] env[61852]: value = "task-1292989" [ 853.199705] env[61852]: _type = "Task" [ 853.199705] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.208014] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8595c83-39f3-410c-904a-5980a7d53788 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.212891] env[61852]: DEBUG oslo_vmware.api [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Task: {'id': task-1292989, 'name': Rename_Task} progress is 14%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.225332] env[61852]: DEBUG nova.compute.provider_tree [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 853.402779] env[61852]: DEBUG oslo_vmware.api [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]520380f8-a648-519b-9fe0-d8089c6d99de, 'name': SearchDatastore_Task, 'duration_secs': 0.012243} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.403712] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3208339-18f6-438e-ad50-bf5f4100efdb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.410040] env[61852]: DEBUG oslo_vmware.api [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){ [ 853.410040] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529990b0-19fa-0586-02ad-68e7722b5e8e" [ 853.410040] env[61852]: _type = "Task" [ 853.410040] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.418846] env[61852]: DEBUG oslo_vmware.api [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529990b0-19fa-0586-02ad-68e7722b5e8e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.458615] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c053658f-2f07-4cac-8e43-b3bd9d9c2f53 tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Lock "aeaa2828-6d83-4b26-bd1c-5f654c70713f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.714s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.511876] env[61852]: DEBUG oslo_vmware.api [None req-fe4276cd-dc2b-4919-a432-bea3a809a392 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1292964, 'name': ReconfigVM_Task} progress is 18%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.566639] env[61852]: DEBUG oslo_vmware.api [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1292988, 'name': Destroy_Task} progress is 33%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.571718] env[61852]: DEBUG nova.compute.manager [req-881ff5ad-264a-40a3-bffb-023838004d47 req-2322e2d1-e97e-434c-9f27-1d4cf274e2b4 service nova] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Received event network-vif-deleted-ebf57cb3-0f32-48ff-a39a-bc6c32d02167 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 853.572174] env[61852]: DEBUG nova.compute.manager [req-881ff5ad-264a-40a3-bffb-023838004d47 req-2322e2d1-e97e-434c-9f27-1d4cf274e2b4 service nova] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Received event network-vif-deleted-8f488560-af02-4742-8338-8d0855707346 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 853.708680] env[61852]: DEBUG oslo_vmware.api [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Task: {'id': task-1292989, 'name': Rename_Task, 'duration_secs': 0.175451} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.708976] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 853.709367] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-04e68e53-3f2a-4644-bf09-07e87388ee38 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.716686] env[61852]: DEBUG oslo_vmware.api [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Waiting for the task: (returnval){ [ 853.716686] env[61852]: value = "task-1292990" [ 853.716686] env[61852]: _type = "Task" [ 853.716686] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.724494] env[61852]: DEBUG oslo_vmware.api [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Task: {'id': task-1292990, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.730699] env[61852]: DEBUG nova.scheduler.client.report [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 853.808248] env[61852]: DEBUG nova.compute.manager [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 853.844691] env[61852]: DEBUG nova.virt.hardware [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 853.845125] env[61852]: DEBUG nova.virt.hardware [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 853.845298] env[61852]: DEBUG nova.virt.hardware [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 853.845503] env[61852]: DEBUG nova.virt.hardware [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 853.845661] env[61852]: DEBUG nova.virt.hardware [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 853.845811] env[61852]: DEBUG nova.virt.hardware [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 
tempest-ImagesTestJSON-651191963-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 853.846109] env[61852]: DEBUG nova.virt.hardware [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 853.846388] env[61852]: DEBUG nova.virt.hardware [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 853.846771] env[61852]: DEBUG nova.virt.hardware [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 853.847044] env[61852]: DEBUG nova.virt.hardware [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 853.847297] env[61852]: DEBUG nova.virt.hardware [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 853.848248] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4ddd164-887b-4f5f-9c7e-3b8235fbc20b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.856823] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c32afaa3-1fed-4c83-9f3b-b0a40d2304bc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.921105] env[61852]: DEBUG oslo_vmware.api [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529990b0-19fa-0586-02ad-68e7722b5e8e, 'name': SearchDatastore_Task, 'duration_secs': 0.009582} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.921384] env[61852]: DEBUG oslo_concurrency.lockutils [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.921655] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 8d8679db-eb9d-45c1-b053-70378f58e273/8d8679db-eb9d-45c1-b053-70378f58e273.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 853.921932] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-05cf221e-62a6-483e-90ee-fd65dbae3853 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.928605] env[61852]: DEBUG oslo_vmware.api [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){ [ 853.928605] env[61852]: value = "task-1292991" [ 853.928605] env[61852]: _type = "Task" [ 853.928605] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.936927] env[61852]: DEBUG oslo_vmware.api [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1292991, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.011898] env[61852]: DEBUG oslo_vmware.api [None req-fe4276cd-dc2b-4919-a432-bea3a809a392 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1292964, 'name': ReconfigVM_Task, 'duration_secs': 5.816434} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.012241] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe4276cd-dc2b-4919-a432-bea3a809a392 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Releasing lock "d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.012516] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fe4276cd-dc2b-4919-a432-bea3a809a392 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Reconfigured VM to detach interface {{(pid=61852) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 854.074931] env[61852]: DEBUG oslo_vmware.api [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1292988, 'name': Destroy_Task, 'duration_secs': 0.777239} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.075141] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Destroyed the VM [ 854.075383] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Deleting Snapshot of the VM instance {{(pid=61852) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 854.076057] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-172091e3-1d27-499a-8f69-489c43537840 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.082163] env[61852]: DEBUG oslo_vmware.api [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 854.082163] env[61852]: value = "task-1292992" [ 854.082163] env[61852]: _type = "Task" [ 854.082163] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.097761] env[61852]: DEBUG oslo_vmware.api [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1292992, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.134985] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Acquiring lock "eae1ad1f-f213-4227-93aa-b0ccf660e638" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.135315] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Lock "eae1ad1f-f213-4227-93aa-b0ccf660e638" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.233091] env[61852]: DEBUG oslo_vmware.api [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Task: {'id': task-1292990, 'name': PowerOnVM_Task} progress is 90%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.236304] env[61852]: DEBUG oslo_concurrency.lockutils [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.448s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.237412] env[61852]: DEBUG nova.compute.manager [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 854.240707] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.040s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.241167] env[61852]: DEBUG nova.objects.instance [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Lazy-loading 'resources' on Instance uuid 0ec1210f-7d42-4b71-abdc-9f818ffb91ea {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 854.441319] env[61852]: DEBUG oslo_vmware.api [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1292991, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.519255] env[61852]: DEBUG nova.network.neutron [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Successfully updated port: 40eb747f-021a-4082-9f8d-70a6af6a415e {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 854.592917] env[61852]: DEBUG oslo_vmware.api [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1292992, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.638368] env[61852]: DEBUG nova.compute.manager [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 854.729963] env[61852]: DEBUG oslo_vmware.api [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Task: {'id': task-1292990, 'name': PowerOnVM_Task, 'duration_secs': 0.560942} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.729963] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 854.729963] env[61852]: INFO nova.compute.manager [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Took 7.90 seconds to spawn the instance on the hypervisor. [ 854.730291] env[61852]: DEBUG nova.compute.manager [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 854.731751] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dba1499-d667-41a9-b66c-a66bd83f3954 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.747437] env[61852]: DEBUG nova.compute.utils [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 854.750436] env[61852]: DEBUG nova.compute.manager [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 854.750436] env[61852]: DEBUG nova.network.neutron [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 854.800263] env[61852]: DEBUG nova.policy [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '47b9c159fc5547579b0e429b09d92760', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8dff8d945da948a89ee0fb2e2ddd0f9b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 854.842035] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Acquiring lock "aeaa2828-6d83-4b26-bd1c-5f654c70713f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.842603] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Lock "aeaa2828-6d83-4b26-bd1c-5f654c70713f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.843141] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Acquiring lock "aeaa2828-6d83-4b26-bd1c-5f654c70713f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.843500] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Lock "aeaa2828-6d83-4b26-bd1c-5f654c70713f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.843914] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Lock "aeaa2828-6d83-4b26-bd1c-5f654c70713f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.849174] env[61852]: INFO nova.compute.manager [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Terminating instance [ 854.854160] env[61852]: DEBUG nova.compute.manager [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 854.854160] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 854.854160] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7c5aaba-8f36-487d-99c4-6b47154e9c95 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.863908] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 854.864439] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7bd598e8-b545-4e11-9f36-c511081444f2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.875455] env[61852]: DEBUG oslo_vmware.api [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Waiting for the task: (returnval){ [ 854.875455] env[61852]: value = "task-1292993" [ 854.875455] env[61852]: _type = "Task" [ 854.875455] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.889027] env[61852]: DEBUG oslo_vmware.api [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Task: {'id': task-1292993, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.944505] env[61852]: DEBUG oslo_vmware.api [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1292991, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.562271} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.944874] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 8d8679db-eb9d-45c1-b053-70378f58e273/8d8679db-eb9d-45c1-b053-70378f58e273.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 854.945158] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 854.945735] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c7a22292-2f76-4e9c-a561-d0bfc0b4abc5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.956178] env[61852]: DEBUG oslo_vmware.api [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){ [ 854.956178] env[61852]: value = "task-1292994" [ 854.956178] env[61852]: _type = "Task" [ 854.956178] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.969564] env[61852]: DEBUG oslo_vmware.api [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1292994, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.021999] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "refresh_cache-254919cb-e3cd-4288-8696-95e632d78a38" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.022429] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquired lock "refresh_cache-254919cb-e3cd-4288-8696-95e632d78a38" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.022429] env[61852]: DEBUG nova.network.neutron [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 855.092630] env[61852]: DEBUG oslo_vmware.api [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1292992, 'name': RemoveSnapshot_Task, 'duration_secs': 0.702856} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.093964] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Deleted Snapshot of the VM instance {{(pid=61852) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 855.094308] env[61852]: DEBUG nova.compute.manager [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 855.095693] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2614875d-4f44-4e47-86bf-80e4c605275e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.099297] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79ef8522-212f-4cec-a989-5a61f8398a33 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.103841] env[61852]: DEBUG nova.compute.manager [req-192d18d4-4bc4-4edb-9ffa-bc2bd641bc0b req-90fdd9c5-93d3-4ed1-bd0b-dc2626159b93 service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Received event network-vif-deleted-669836ae-c7e6-440f-b9bf-84b0d95a595e {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 855.104094] env[61852]: INFO nova.compute.manager [req-192d18d4-4bc4-4edb-9ffa-bc2bd641bc0b req-90fdd9c5-93d3-4ed1-bd0b-dc2626159b93 service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Neutron deleted interface 669836ae-c7e6-440f-b9bf-84b0d95a595e; detaching it from the instance and deleting it from the info cache [ 855.104397] env[61852]: DEBUG nova.network.neutron [req-192d18d4-4bc4-4edb-9ffa-bc2bd641bc0b req-90fdd9c5-93d3-4ed1-bd0b-dc2626159b93 service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Updating instance_info_cache with network_info: [{"id": "9e5204e6-6870-43d3-986f-9ca080104e14", "address": "fa:16:3e:9f:59:1b", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e5204e6-68", "ovs_interfaceid": "9e5204e6-6870-43d3-986f-9ca080104e14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": 
"b3f3d9b5-9c27-4415-b02c-58c0b1133727", "address": "fa:16:3e:ab:83:17", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3f3d9b5-9c", "ovs_interfaceid": "b3f3d9b5-9c27-4415-b02c-58c0b1133727", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.109106] env[61852]: DEBUG nova.network.neutron [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Successfully created port: 9444dd57-04ba-4f44-8080-68ec800cc9b6 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 855.123714] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efd5d81a-97e4-4df5-a588-996a18040920 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.162509] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f4f4d0f-1c10-4118-8e33-c270a0eb9a7c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.174249] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-325e51c1-a223-4ffc-a063-fa144d7cac40 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.179531] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.196833] env[61852]: DEBUG nova.compute.provider_tree [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 855.252633] env[61852]: INFO nova.compute.manager [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Took 27.26 seconds to build instance. 
[ 855.254982] env[61852]: DEBUG nova.compute.manager [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 855.391020] env[61852]: DEBUG oslo_vmware.api [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Task: {'id': task-1292993, 'name': PowerOffVM_Task, 'duration_secs': 0.237486} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.392433] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 855.392620] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 855.392899] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6b9f888c-65c1-42ee-9e98-86c1a0966124 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.462805] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 855.463089] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 855.463300] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Deleting the datastore file [datastore1] aeaa2828-6d83-4b26-bd1c-5f654c70713f {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 855.464185] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe4276cd-dc2b-4919-a432-bea3a809a392 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "refresh_cache-d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.464413] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe4276cd-dc2b-4919-a432-bea3a809a392 tempest-AttachInterfacesTestJSON-673373864 
tempest-AttachInterfacesTestJSON-673373864-project-member] Acquired lock "refresh_cache-d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.464598] env[61852]: DEBUG nova.network.neutron [None req-fe4276cd-dc2b-4919-a432-bea3a809a392 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 855.468165] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e9c5c6f7-ae8c-4473-8df2-25e256b72198 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.473956] env[61852]: DEBUG oslo_vmware.api [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1292994, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063002} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.474618] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 855.475487] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abd51bd1-bef1-46b3-801b-e757e398e48a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.479754] env[61852]: DEBUG oslo_vmware.api [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Waiting for the task: (returnval){ [ 855.479754] env[61852]: value = "task-1292996" [ 855.479754] env[61852]: _type = "Task" [ 855.479754] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.502362] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] 8d8679db-eb9d-45c1-b053-70378f58e273/8d8679db-eb9d-45c1-b053-70378f58e273.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 855.503670] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2662e33a-902f-47b7-94fd-0c8681a260ee {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.522169] env[61852]: DEBUG oslo_vmware.api [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Task: {'id': task-1292996, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.527697] env[61852]: DEBUG oslo_vmware.api [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){ [ 855.527697] env[61852]: value = "task-1292997" [ 855.527697] env[61852]: _type = "Task" [ 855.527697] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.538225] env[61852]: DEBUG oslo_vmware.api [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1292997, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.562072] env[61852]: DEBUG nova.network.neutron [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 855.611963] env[61852]: DEBUG oslo_concurrency.lockutils [req-192d18d4-4bc4-4edb-9ffa-bc2bd641bc0b req-90fdd9c5-93d3-4ed1-bd0b-dc2626159b93 service nova] Acquiring lock "d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.613259] env[61852]: DEBUG oslo_concurrency.lockutils [req-192d18d4-4bc4-4edb-9ffa-bc2bd641bc0b req-90fdd9c5-93d3-4ed1-bd0b-dc2626159b93 service nova] Acquired lock "d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.615861] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c2d2e0b-b811-4b99-9ca5-2145348e7cac {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.619946] env[61852]: DEBUG nova.compute.manager [req-9bae8412-8bbb-4af3-8421-c7668264e309 req-09a5ad18-b157-41bb-baeb-8b727aae007a service nova] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Received event network-vif-plugged-40eb747f-021a-4082-9f8d-70a6af6a415e {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 855.620178] env[61852]: DEBUG oslo_concurrency.lockutils [req-9bae8412-8bbb-4af3-8421-c7668264e309 req-09a5ad18-b157-41bb-baeb-8b727aae007a service nova] Acquiring lock "254919cb-e3cd-4288-8696-95e632d78a38-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.620414] env[61852]: DEBUG oslo_concurrency.lockutils [req-9bae8412-8bbb-4af3-8421-c7668264e309 req-09a5ad18-b157-41bb-baeb-8b727aae007a service nova] Lock "254919cb-e3cd-4288-8696-95e632d78a38-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.620557] env[61852]: DEBUG oslo_concurrency.lockutils [req-9bae8412-8bbb-4af3-8421-c7668264e309 req-09a5ad18-b157-41bb-baeb-8b727aae007a service nova] Lock "254919cb-e3cd-4288-8696-95e632d78a38-events" 
"released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.620728] env[61852]: DEBUG nova.compute.manager [req-9bae8412-8bbb-4af3-8421-c7668264e309 req-09a5ad18-b157-41bb-baeb-8b727aae007a service nova] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] No waiting events found dispatching network-vif-plugged-40eb747f-021a-4082-9f8d-70a6af6a415e {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 855.620893] env[61852]: WARNING nova.compute.manager [req-9bae8412-8bbb-4af3-8421-c7668264e309 req-09a5ad18-b157-41bb-baeb-8b727aae007a service nova] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Received unexpected event network-vif-plugged-40eb747f-021a-4082-9f8d-70a6af6a415e for instance with vm_state building and task_state spawning. [ 855.621094] env[61852]: DEBUG nova.compute.manager [req-9bae8412-8bbb-4af3-8421-c7668264e309 req-09a5ad18-b157-41bb-baeb-8b727aae007a service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Received event network-vif-deleted-b3f3d9b5-9c27-4415-b02c-58c0b1133727 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 855.622301] env[61852]: INFO nova.compute.manager [req-9bae8412-8bbb-4af3-8421-c7668264e309 req-09a5ad18-b157-41bb-baeb-8b727aae007a service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Neutron deleted interface b3f3d9b5-9c27-4415-b02c-58c0b1133727; detaching it from the instance and deleting it from the info cache [ 855.622301] env[61852]: DEBUG nova.network.neutron [req-9bae8412-8bbb-4af3-8421-c7668264e309 req-09a5ad18-b157-41bb-baeb-8b727aae007a service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Updating instance_info_cache with network_info: [{"id": "9e5204e6-6870-43d3-986f-9ca080104e14", "address": "fa:16:3e:9f:59:1b", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e5204e6-68", "ovs_interfaceid": "9e5204e6-6870-43d3-986f-9ca080104e14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.624169] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "d3922357-383f-4f7e-9c76-4eb688a092b9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" 
{{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.645662] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c1f9017-4b05-4d46-b5ec-2202a4a7fdf1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.648842] env[61852]: INFO nova.compute.manager [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Shelve offloading [ 855.651514] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 855.655022] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-05348d92-9342-455d-834c-d33a3f60938c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.675793] env[61852]: DEBUG nova.virt.vmwareapi.vmops [req-192d18d4-4bc4-4edb-9ffa-bc2bd641bc0b req-90fdd9c5-93d3-4ed1-bd0b-dc2626159b93 service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Reconfiguring VM to detach interface {{(pid=61852) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 855.676660] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a90aa85-bc3d-437c-867f-97f446bc6679 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.696618] env[61852]: DEBUG nova.scheduler.client.report [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 855.701016] env[61852]: DEBUG oslo_vmware.api [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 855.701016] env[61852]: value = "task-1292998" [ 855.701016] env[61852]: _type = "Task" [ 855.701016] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.706982] env[61852]: DEBUG oslo_vmware.api [req-192d18d4-4bc4-4edb-9ffa-bc2bd641bc0b req-90fdd9c5-93d3-4ed1-bd0b-dc2626159b93 service nova] Waiting for the task: (returnval){ [ 855.706982] env[61852]: value = "task-1292999" [ 855.706982] env[61852]: _type = "Task" [ 855.706982] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.717145] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] VM already powered off {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 855.717383] env[61852]: DEBUG nova.compute.manager [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 855.718164] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec30df54-ed55-47a3-b630-80e878dfd409 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.724978] env[61852]: DEBUG oslo_vmware.api [req-192d18d4-4bc4-4edb-9ffa-bc2bd641bc0b req-90fdd9c5-93d3-4ed1-bd0b-dc2626159b93 service nova] Task: {'id': task-1292999, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.728729] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "refresh_cache-00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.728908] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquired lock "refresh_cache-00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.729106] env[61852]: DEBUG nova.network.neutron [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 855.744064] env[61852]: DEBUG nova.network.neutron [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Updating instance_info_cache with network_info: [{"id": "40eb747f-021a-4082-9f8d-70a6af6a415e", "address": "fa:16:3e:5e:f8:1c", "network": {"id": "5c538b43-cd66-41dd-b7f8-8d7f49060f2f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1279580713-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "783bc6968c91488293479f10b8dc92c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "c3e0aae3-33d1-403b-bfaf-306f77a1422e", "external-id": "nsx-vlan-transportzone-211", "segmentation_id": 211, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40eb747f-02", "ovs_interfaceid": "40eb747f-021a-4082-9f8d-70a6af6a415e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.757309] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6024b3e2-0732-48f7-a7eb-f77e81410c25 tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Lock "21d74604-6a64-44ee-a012-ebff7166853e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.612s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.991201] env[61852]: DEBUG oslo_vmware.api [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Task: {'id': task-1292996, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.299263} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.991568] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 855.992025] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 855.992100] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 855.992861] env[61852]: INFO nova.compute.manager [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Took 1.14 seconds to destroy the instance on the hypervisor. [ 855.992861] env[61852]: DEBUG oslo.service.loopingcall [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 855.992861] env[61852]: DEBUG nova.compute.manager [-] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 855.992861] env[61852]: DEBUG nova.network.neutron [-] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 856.038701] env[61852]: DEBUG oslo_vmware.api [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1292997, 'name': ReconfigVM_Task, 'duration_secs': 0.403338} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.040971] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Reconfigured VM instance instance-00000049 to attach disk [datastore1] 8d8679db-eb9d-45c1-b053-70378f58e273/8d8679db-eb9d-45c1-b053-70378f58e273.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 856.041623] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d72c4de7-908f-45f3-b6fb-7aa396c02002 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.048909] env[61852]: DEBUG oslo_vmware.api [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){ [ 856.048909] env[61852]: value = "task-1293000" [ 856.048909] env[61852]: _type = "Task" [ 856.048909] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.057390] env[61852]: DEBUG oslo_vmware.api [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293000, 'name': Rename_Task} progress is 5%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.126532] env[61852]: DEBUG oslo_concurrency.lockutils [req-9bae8412-8bbb-4af3-8421-c7668264e309 req-09a5ad18-b157-41bb-baeb-8b727aae007a service nova] Acquiring lock "d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.203230] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.963s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.205617] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.257s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.206953] env[61852]: DEBUG nova.objects.instance [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lazy-loading 'resources' on Instance uuid 89970cff-cb49-4803-81a5-1675b0ea4aaf {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 856.219509] env[61852]: DEBUG oslo_vmware.api [req-192d18d4-4bc4-4edb-9ffa-bc2bd641bc0b req-90fdd9c5-93d3-4ed1-bd0b-dc2626159b93 service nova] Task: {'id': task-1292999, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.228262] env[61852]: INFO nova.scheduler.client.report [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Deleted allocations for instance 0ec1210f-7d42-4b71-abdc-9f818ffb91ea [ 856.243123] env[61852]: INFO nova.network.neutron [None req-fe4276cd-dc2b-4919-a432-bea3a809a392 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Port b3f3d9b5-9c27-4415-b02c-58c0b1133727 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
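Most of the oslo_vmware.api traffic in this log follows one cycle: invoke a vSphere *_Task method, then block on wait_for_task, whose polling loop emits the "Waiting for the task", "Task: {...} progress is N%", and "completed successfully" entries seen above. A minimal sketch of that cycle using oslo.vmware directly, under stated assumptions: the host, credentials, and datastore paths are placeholders, the datacenter references Nova normally passes are omitted, and error handling is left out.

    # Sketch: the CopyVirtualDisk_Task / wait_for_task cycle behind the
    # _poll_task DEBUG lines above. Placeholders throughout; not Nova code.
    from oslo_vmware import api

    # Logging in happens on construction (create_session defaults to True).
    session = api.VMwareAPISession(
        'vc.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    vdm = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', vdm,
        sourceName='[datastore1] cache/image.vmdk',
        destName='[datastore1] instance/instance.vmdk')

    # Polls the task, logging progress, and raises if it ends in error;
    # on success it returns the final task info.
    task_info = session.wait_for_task(task)
    print(task_info.state)

Because wait_for_task only returns once the task reaches a terminal state, each *_Task invocation in the log is bracketed by a "Waiting for the task" entry and either a "completed successfully" entry or an exception.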
[ 856.243487] env[61852]: DEBUG nova.network.neutron [None req-fe4276cd-dc2b-4919-a432-bea3a809a392 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Updating instance_info_cache with network_info: [{"id": "9e5204e6-6870-43d3-986f-9ca080104e14", "address": "fa:16:3e:9f:59:1b", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e5204e6-68", "ovs_interfaceid": "9e5204e6-6870-43d3-986f-9ca080104e14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 856.246077] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Releasing lock "refresh_cache-254919cb-e3cd-4288-8696-95e632d78a38" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 856.246345] env[61852]: DEBUG nova.compute.manager [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Instance network_info: |[{"id": "40eb747f-021a-4082-9f8d-70a6af6a415e", "address": "fa:16:3e:5e:f8:1c", "network": {"id": "5c538b43-cd66-41dd-b7f8-8d7f49060f2f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1279580713-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "783bc6968c91488293479f10b8dc92c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3e0aae3-33d1-403b-bfaf-306f77a1422e", "external-id": "nsx-vlan-transportzone-211", "segmentation_id": 211, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40eb747f-02", "ovs_interfaceid": "40eb747f-021a-4082-9f8d-70a6af6a415e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}}
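The network_info blobs above are lists of VIF dicts: port id, MAC address, and a network with subnets, fixed IPs and optional floating IPs, plus backend binding details. A short sketch of walking one such entry; the literal below is trimmed from the instance_info_cache record logged above.

    # Trimmed copy of one VIF entry from the instance_info_cache record above.
    vif = {
        "id": "9e5204e6-6870-43d3-986f-9ca080104e14",
        "address": "fa:16:3e:9f:59:1b",
        "network": {
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{
                    "address": "192.168.128.8",
                    "type": "fixed",
                    "floating_ips": [{"address": "10.180.180.228", "type": "floating"}],
                }],
            }],
        },
    }

    def addresses(vif):
        """Yield (kind, address) pairs for every fixed and floating IP."""
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                yield ip["type"], ip["address"]
                for fip in ip.get("floating_ips", []):
                    yield fip["type"], fip["address"]

    print(list(addresses(vif)))  # [('fixed', '192.168.128.8'), ('floating', '10.180.180.228')]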
[ 856.246726] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5e:f8:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c3e0aae3-33d1-403b-bfaf-306f77a1422e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '40eb747f-021a-4082-9f8d-70a6af6a415e', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 856.256566] env[61852]: DEBUG oslo.service.loopingcall [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 856.257426] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 856.257712] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3121a6f1-5f4c-49e6-a8b2-6a116e3b75ac {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 856.276332] env[61852]: DEBUG nova.compute.manager [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 856.287309] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 856.287309] env[61852]: value = "task-1293001"
[ 856.287309] env[61852]: _type = "Task"
[ 856.287309] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 856.295375] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293001, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
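The "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" record above is oslo.service's looping-call machinery wrapping a long-running call. A minimal sketch of the same primitive, using only the documented FixedIntervalLoopingCall API; the polled function here is a stand-in for illustration.

    from oslo_service import loopingcall

    def _poll():
        # Stand-in for a periodic check; raising LoopingCallDone(retvalue=...)
        # stops the loop and hands the value back to wait().
        raise loopingcall.LoopingCallDone(retvalue='done')

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    result = timer.start(interval=0.5).wait()
    print(result)  # 'done'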
[ 856.309268] env[61852]: DEBUG nova.virt.hardware [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 856.309564] env[61852]: DEBUG nova.virt.hardware [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 856.309726] env[61852]: DEBUG nova.virt.hardware [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 856.309914] env[61852]: DEBUG nova.virt.hardware [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 856.310576] env[61852]: DEBUG nova.virt.hardware [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 856.310784] env[61852]: DEBUG nova.virt.hardware [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 856.311722] env[61852]: DEBUG nova.virt.hardware [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 856.311722] env[61852]: DEBUG nova.virt.hardware [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 856.311722] env[61852]: DEBUG nova.virt.hardware [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 856.311722] env[61852]: DEBUG nova.virt.hardware [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 856.311722] env[61852]: DEBUG nova.virt.hardware [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 856.312542] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc2a397-e83d-4699-95b6-c49a9a6079fb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 856.320953] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b882d31-5de4-45e4-b1f1-f6b2bcfcfa12 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 856.528160] env[61852]: DEBUG nova.network.neutron [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Updating instance_info_cache with network_info: [{"id": "ea60304c-08b8-4035-8ece-fc40b1b508b1", "address": "fa:16:3e:8c:e4:12", "network": {"id": "240e5d63-b796-4cef-9d1f-5d8f8868dea4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1472329620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdac3605118e44a69d44ab56cafe2e21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea60304c-08", "ovs_interfaceid": "ea60304c-08b8-4035-8ece-fc40b1b508b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 856.562362] env[61852]: DEBUG oslo_vmware.api [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293000, 'name': Rename_Task, 'duration_secs': 0.164769} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
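The topology records above walk Nova's selection steps: take flavor and image limits (all unset here, so the 65536 defaults apply), enumerate topologies whose product equals the vCPU count, then sort by preference. A simplified, self-contained enumeration for the 1-vCPU case logged above; this illustrates the idea, not Nova's exact code.

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Yield (sockets, cores, threads) triples whose product equals vcpus."""
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        yield sockets, cores, threads

    print(list(possible_topologies(1)))  # [(1, 1, 1)] -- matches "Got 1 possible topologies"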
[ 856.562895] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 856.562975] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a3126a83-3669-4635-a70b-e449b96edb6f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 856.569475] env[61852]: DEBUG oslo_vmware.api [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){
[ 856.569475] env[61852]: value = "task-1293002"
[ 856.569475] env[61852]: _type = "Task"
[ 856.569475] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 856.577902] env[61852]: DEBUG oslo_vmware.api [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293002, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 856.717397] env[61852]: DEBUG nova.network.neutron [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Successfully updated port: 9444dd57-04ba-4f44-8080-68ec800cc9b6 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 856.731972] env[61852]: DEBUG oslo_vmware.api [req-192d18d4-4bc4-4edb-9ffa-bc2bd641bc0b req-90fdd9c5-93d3-4ed1-bd0b-dc2626159b93 service nova] Task: {'id': task-1292999, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 856.745294] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5fb7bf7d-cfa3-4174-9187-df2de4454822 tempest-SecurityGroupsTestJSON-2040870376 tempest-SecurityGroupsTestJSON-2040870376-project-member] Lock "0ec1210f-7d42-4b71-abdc-9f818ffb91ea" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.519s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 856.746556] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe4276cd-dc2b-4919-a432-bea3a809a392 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Releasing lock "refresh_cache-d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 856.804175] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293001, 'name': CreateVM_Task, 'duration_secs': 0.37333} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 856.804403] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 856.805174] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 856.805355] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 856.805699] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 856.805972] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58022f8e-8837-4723-b9ec-0ea05916a3fc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 856.814495] env[61852]: DEBUG oslo_vmware.api [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){
[ 856.814495] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52f06280-fe44-cf2b-f63e-d73858ee84ae"
[ 856.814495] env[61852]: _type = "Task"
[ 856.814495] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 856.823373] env[61852]: DEBUG oslo_vmware.api [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52f06280-fe44-cf2b-f63e-d73858ee84ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 857.030122] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b6fe24f-f018-4d6a-818c-2256a10728f8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 857.034372] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Releasing lock "refresh_cache-00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 857.042245] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc3127b1-7bf3-447b-8fbe-d69d948a2fe4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 857.077835] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfc0f57a-0546-45ce-bd8f-6ed3c84f7d6e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 857.088133] env[61852]: DEBUG oslo_vmware.api [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293002, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 857.090938] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1411574a-568c-42c8-9009-42eb9bfa036a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 857.107318] env[61852]: DEBUG nova.compute.provider_tree [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 857.113998] env[61852]: DEBUG nova.network.neutron [-] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 857.133775] env[61852]: DEBUG nova.compute.manager [req-fc45499f-2a2b-468a-aca7-8da30293265a req-c6828441-052c-4e15-a3f7-70d62ab7f0c2 service nova] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Received event network-vif-deleted-2dae9519-f301-4529-81ef-5ee1ee9c0718 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 857.219698] env[61852]: DEBUG oslo_vmware.api [req-192d18d4-4bc4-4edb-9ffa-bc2bd641bc0b req-90fdd9c5-93d3-4ed1-bd0b-dc2626159b93 service nova] Task: {'id': task-1292999, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
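"Inventory has not changed in ProviderTree" above is the resource tracker comparing locally computed inventory against what it last reported to Placement and skipping the update on equality. A sketch of that change check over inventory mappings shaped like the inventory data logged later in this section; this is an illustration of the comparison, not Nova's code.

    def changed_classes(reported, current):
        """Return the resource classes whose inventory fields differ.

        Both arguments are mappings like
        {'VCPU': {'total': 48, 'reserved': 0, ...}, 'MEMORY_MB': {...}, ...}.
        """
        classes = set(reported) | set(current)
        return sorted(rc for rc in classes if reported.get(rc) != current.get(rc))

    reported = {'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
                'MEMORY_MB': {'total': 196590, 'reserved': 512}}
    current = dict(reported)
    if not changed_classes(reported, current):
        print('Inventory has not changed; no report to Placement needed')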
[ 857.226749] env[61852]: DEBUG oslo_concurrency.lockutils [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquiring lock "refresh_cache-23ff3009-7b13-4d5e-93ed-ca1c3e9127bb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 857.226942] env[61852]: DEBUG oslo_concurrency.lockutils [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquired lock "refresh_cache-23ff3009-7b13-4d5e-93ed-ca1c3e9127bb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 857.227134] env[61852]: DEBUG nova.network.neutron [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 857.250909] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe4276cd-dc2b-4919-a432-bea3a809a392 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "interface-d3922357-383f-4f7e-9c76-4eb688a092b9-b3f3d9b5-9c27-4415-b02c-58c0b1133727" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.874s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 857.326673] env[61852]: DEBUG oslo_vmware.api [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52f06280-fe44-cf2b-f63e-d73858ee84ae, 'name': SearchDatastore_Task, 'duration_secs': 0.013259} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 857.326673] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 857.326673] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 857.326673] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 857.326673] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 857.327050] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 857.327339] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6684b288-b57c-4554-bf45-9cc6d20e820c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 857.336141] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 857.336376] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
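The mkdir of devstack-image-cache_base above runs on every cache miss, so "_create_folder_if_missing" has to tolerate the directory already existing. A sketch of that idempotent pattern; make_directory is a hypothetical callable standing in for the FileManager.MakeDirectory invocation seen in the records above.

    class FileAlreadyExists(Exception):
        pass

    def create_folder_if_missing(make_directory, datastore, path):
        """Create [datastore] path, treating 'already exists' as success.

        make_directory is a hypothetical wrapper around the vCenter
        FileManager.MakeDirectory call logged above.
        """
        try:
            make_directory(datastore, path, create_parents=True)
            print('Created directory with path [%s] %s' % (datastore, path))
        except FileAlreadyExists:
            # Racing spawns may have created it first; that is fine.
            print('Folder [%s] %s already exists' % (datastore, path))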
[ 857.337771] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f1ba2bc-1b84-4eab-b639-5dd74b373932 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 857.347033] env[61852]: DEBUG oslo_vmware.api [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){
[ 857.347033] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d12bde-aa6e-7573-6204-287aa88de20d"
[ 857.347033] env[61852]: _type = "Task"
[ 857.347033] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 857.353858] env[61852]: DEBUG oslo_vmware.api [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d12bde-aa6e-7573-6204-287aa88de20d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 857.354934] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 857.355429] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7581b903-c15c-47f8-901b-ad8c5b0791b8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 857.362404] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 857.362689] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-096b6607-99a2-4caf-adb2-6aee736d9b36 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 857.430633] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 857.430865] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Deleting contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 857.431065] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Deleting the datastore file [datastore2] 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
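The destroy path above is strictly ordered: unregister the VM from the vCenter inventory first, then delete its datastore contents, then report the instance destroyed. A sketch of that sequence with hypothetical helpers standing in for the UnregisterVM and DeleteDatastoreFile_Task calls; the ordering is the point, since deleting files under a still-registered VM can leave dangling references.

    def destroy_instance(vm_ref, datastore_paths, unregister_vm, delete_file):
        """Mirror the unregister-then-delete ordering in the records above."""
        unregister_vm(vm_ref)            # "Unregistering the VM" / "Unregistered the VM"
        for path in datastore_paths:     # "Deleting the datastore file [datastore2] ..."
            delete_file(path)
        print('Instance destroyed')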
[ 857.431384] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6c0dad20-33d7-4cb0-9156-c1c930d338ba {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 857.439154] env[61852]: DEBUG oslo_vmware.api [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){
[ 857.439154] env[61852]: value = "task-1293004"
[ 857.439154] env[61852]: _type = "Task"
[ 857.439154] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 857.447219] env[61852]: DEBUG oslo_vmware.api [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293004, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 857.583386] env[61852]: DEBUG oslo_vmware.api [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293002, 'name': PowerOnVM_Task, 'duration_secs': 0.529054} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 857.583670] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 857.583890] env[61852]: INFO nova.compute.manager [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Took 8.25 seconds to spawn the instance on the hypervisor.
[ 857.584082] env[61852]: DEBUG nova.compute.manager [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 857.584860] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffa42e7a-2798-4eac-bab2-f9ec6078893b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 857.620383] env[61852]: DEBUG nova.scheduler.client.report [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 857.623275] env[61852]: INFO nova.compute.manager [-] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Took 1.63 seconds to deallocate network for instance.
[ 857.720407] env[61852]: DEBUG oslo_vmware.api [req-192d18d4-4bc4-4edb-9ffa-bc2bd641bc0b req-90fdd9c5-93d3-4ed1-bd0b-dc2626159b93 service nova] Task: {'id': task-1292999, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 857.765462] env[61852]: DEBUG nova.network.neutron [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 857.787040] env[61852]: DEBUG nova.compute.manager [req-df76baea-b3d3-44e7-865c-aedc3ffc8112 req-d6baade6-58ef-4d09-9ecd-594a40c3e84c service nova] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Received event network-vif-plugged-9444dd57-04ba-4f44-8080-68ec800cc9b6 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 857.787284] env[61852]: DEBUG oslo_concurrency.lockutils [req-df76baea-b3d3-44e7-865c-aedc3ffc8112 req-d6baade6-58ef-4d09-9ecd-594a40c3e84c service nova] Acquiring lock "23ff3009-7b13-4d5e-93ed-ca1c3e9127bb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 857.787531] env[61852]: DEBUG oslo_concurrency.lockutils [req-df76baea-b3d3-44e7-865c-aedc3ffc8112 req-d6baade6-58ef-4d09-9ecd-594a40c3e84c service nova] Lock "23ff3009-7b13-4d5e-93ed-ca1c3e9127bb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 857.787727] env[61852]: DEBUG oslo_concurrency.lockutils [req-df76baea-b3d3-44e7-865c-aedc3ffc8112 req-d6baade6-58ef-4d09-9ecd-594a40c3e84c service nova] Lock "23ff3009-7b13-4d5e-93ed-ca1c3e9127bb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 857.787902] env[61852]: DEBUG nova.compute.manager [req-df76baea-b3d3-44e7-865c-aedc3ffc8112 req-d6baade6-58ef-4d09-9ecd-594a40c3e84c service nova] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] No waiting events found dispatching network-vif-plugged-9444dd57-04ba-4f44-8080-68ec800cc9b6 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 857.788272] env[61852]: WARNING nova.compute.manager [req-df76baea-b3d3-44e7-865c-aedc3ffc8112 req-d6baade6-58ef-4d09-9ecd-594a40c3e84c service nova] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Received unexpected event network-vif-plugged-9444dd57-04ba-4f44-8080-68ec800cc9b6 for instance with vm_state building and task_state spawning.
[ 857.788477] env[61852]: DEBUG nova.compute.manager [req-df76baea-b3d3-44e7-865c-aedc3ffc8112 req-d6baade6-58ef-4d09-9ecd-594a40c3e84c service nova] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Received event network-changed-9444dd57-04ba-4f44-8080-68ec800cc9b6 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 857.788640] env[61852]: DEBUG nova.compute.manager [req-df76baea-b3d3-44e7-865c-aedc3ffc8112 req-d6baade6-58ef-4d09-9ecd-594a40c3e84c service nova] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Refreshing instance network info cache due to event network-changed-9444dd57-04ba-4f44-8080-68ec800cc9b6. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
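The vif-plugged event above arrives before anyone is waiting on it, so the manager logs "No waiting events found dispatching" and then the WARNING about an unexpected event. A minimal sketch of that registry pattern: waiters register a per-(instance, event) latch under a lock, and incoming events either wake a waiter or are reported as unexpected. Class and method names here are illustrative, not Nova's.

    import threading

    class InstanceEvents:
        def __init__(self):
            self._waiters = {}   # (instance_uuid, event_name) -> threading.Event
            self._lock = threading.Lock()

        def prepare(self, instance_uuid, event_name):
            """Called by a spawner before it starts waiting for the event."""
            latch = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = latch
            return latch

        def pop(self, instance_uuid, event_name):
            with self._lock:
                return self._waiters.pop((instance_uuid, event_name), None)

    def external_instance_event(events, instance_uuid, event_name):
        latch = events.pop(instance_uuid, event_name)
        if latch is None:
            # Matches "No waiting events found dispatching ..." plus the
            # WARNING about an unexpected event in the records above.
            print('Received unexpected event %s for %s' % (event_name, instance_uuid))
        else:
            latch.set()

    external_instance_event(InstanceEvents(), '23ff3009', 'network-vif-plugged')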
[ 857.788838] env[61852]: DEBUG oslo_concurrency.lockutils [req-df76baea-b3d3-44e7-865c-aedc3ffc8112 req-d6baade6-58ef-4d09-9ecd-594a40c3e84c service nova] Acquiring lock "refresh_cache-23ff3009-7b13-4d5e-93ed-ca1c3e9127bb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 857.854100] env[61852]: DEBUG oslo_vmware.api [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d12bde-aa6e-7573-6204-287aa88de20d, 'name': SearchDatastore_Task, 'duration_secs': 0.018431} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 857.855090] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ceecf772-99cc-40e8-962b-e35dad8f70c1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 857.860188] env[61852]: DEBUG oslo_vmware.api [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){
[ 857.860188] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529f54c5-a0fd-c124-5437-77c4bb26daac"
[ 857.860188] env[61852]: _type = "Task"
[ 857.860188] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 857.868989] env[61852]: DEBUG oslo_vmware.api [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529f54c5-a0fd-c124-5437-77c4bb26daac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 857.907291] env[61852]: DEBUG nova.network.neutron [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Updating instance_info_cache with network_info: [{"id": "9444dd57-04ba-4f44-8080-68ec800cc9b6", "address": "fa:16:3e:95:4d:d6", "network": {"id": "f986fa1f-0449-45a5-86ee-66a7fe44ea49", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-901829409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8dff8d945da948a89ee0fb2e2ddd0f9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9444dd57-04", "ovs_interfaceid": "9444dd57-04ba-4f44-8080-68ec800cc9b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 857.949553] env[61852]: DEBUG oslo_vmware.api [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293004, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.435724} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 857.949866] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 857.950077] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Deleted contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 857.950264] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 857.978680] env[61852]: INFO nova.scheduler.client.report [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Deleted allocations for instance 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9
[ 858.103052] env[61852]: INFO nova.compute.manager [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Took 30.08 seconds to build instance.
[ 858.125322] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.920s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 858.127739] env[61852]: DEBUG oslo_concurrency.lockutils [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 10.625s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 858.127880] env[61852]: DEBUG nova.objects.instance [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61852) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}}
[ 858.136055] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 858.150486] env[61852]: INFO nova.scheduler.client.report [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Deleted allocations for instance 89970cff-cb49-4803-81a5-1675b0ea4aaf
[ 858.220240] env[61852]: DEBUG oslo_vmware.api [req-192d18d4-4bc4-4edb-9ffa-bc2bd641bc0b req-90fdd9c5-93d3-4ed1-bd0b-dc2626159b93 service nova] Task: {'id': task-1292999, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 858.373938] env[61852]: DEBUG oslo_vmware.api [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529f54c5-a0fd-c124-5437-77c4bb26daac, 'name': SearchDatastore_Task, 'duration_secs': 0.012693} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 858.373938] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 858.373938] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 254919cb-e3cd-4288-8696-95e632d78a38/254919cb-e3cd-4288-8696-95e632d78a38.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 858.375010] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3d019930-6841-4037-aabe-0a4e7a4da9ec {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 858.382247] env[61852]: DEBUG oslo_vmware.api [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){
[ 858.382247] env[61852]: value = "task-1293005"
[ 858.382247] env[61852]: _type = "Task"
[ 858.382247] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 858.390824] env[61852]: DEBUG oslo_vmware.api [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293005, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
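The CopyVirtualDisk_Task above is the image-cache pattern visible throughout this section: a Glance image is fetched once into devstack-image-cache_base/<image-id>/<image-id>.vmdk under a lock, and each spawn then copies that cached disk into the instance's own folder. A sketch of the path arithmetic and cache check; exists and copy_disk are hypothetical callables standing in for the SearchDatastore_Task and CopyVirtualDisk_Task calls logged above.

    def spawn_disk(datastore, image_id, instance_uuid, exists, copy_disk):
        """Copy the cached base image to the instance folder."""
        cache = '[%s] devstack-image-cache_base/%s/%s.vmdk' % (datastore, image_id, image_id)
        target = '[%s] %s/%s.vmdk' % (datastore, instance_uuid, instance_uuid)
        if not exists(cache):
            raise RuntimeError('base image missing; fetch from Glance first')
        copy_disk(cache, target)   # CopyVirtualDisk_Task in the records above
        return target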
[ 858.410211] env[61852]: DEBUG oslo_concurrency.lockutils [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Releasing lock "refresh_cache-23ff3009-7b13-4d5e-93ed-ca1c3e9127bb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 858.410539] env[61852]: DEBUG nova.compute.manager [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Instance network_info: |[{"id": "9444dd57-04ba-4f44-8080-68ec800cc9b6", "address": "fa:16:3e:95:4d:d6", "network": {"id": "f986fa1f-0449-45a5-86ee-66a7fe44ea49", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-901829409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8dff8d945da948a89ee0fb2e2ddd0f9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9444dd57-04", "ovs_interfaceid": "9444dd57-04ba-4f44-8080-68ec800cc9b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}}
[ 858.411266] env[61852]: DEBUG oslo_concurrency.lockutils [req-df76baea-b3d3-44e7-865c-aedc3ffc8112 req-d6baade6-58ef-4d09-9ecd-594a40c3e84c service nova] Acquired lock "refresh_cache-23ff3009-7b13-4d5e-93ed-ca1c3e9127bb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 858.411460] env[61852]: DEBUG nova.network.neutron [req-df76baea-b3d3-44e7-865c-aedc3ffc8112 req-d6baade6-58ef-4d09-9ecd-594a40c3e84c service nova] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Refreshing network info cache for port 9444dd57-04ba-4f44-8080-68ec800cc9b6 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 858.412627] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:95:4d:d6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '779b8e65-8b9e-427e-af08-910febd65bfa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9444dd57-04ba-4f44-8080-68ec800cc9b6', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 858.421411] env[61852]: DEBUG oslo.service.loopingcall [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 858.422473] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 858.422704] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dacf1c79-2641-4d37-a162-6c73a8c1a9a4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 858.443513] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 858.443513] env[61852]: value = "task-1293006"
[ 858.443513] env[61852]: _type = "Task"
[ 858.443513] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 858.451695] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293006, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 858.483491] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 858.604855] env[61852]: DEBUG oslo_concurrency.lockutils [None req-feed105d-a8ba-4d4e-8bb6-70ec81a19c47 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lock "8d8679db-eb9d-45c1-b053-70378f58e273" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.128s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 858.655276] env[61852]: DEBUG oslo_vmware.rw_handles [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523a2f0e-2c8a-9ba6-d8bd-3a8ae9aa3db2/disk-0.vmdk. {{(pid=61852) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}}
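The lease records above and below show the export path's cleanup rule: when a VMDK read handle is closed, the lease state is checked, and a lease still in the "ready" state with an incomplete transfer is aborted rather than completed, which is what produces the ERROR record that follows. A sketch of that close-path decision; the lease object here is hypothetical, standing in for the HttpNfcLease calls in the records below.

    def close_read_handle(lease, bytes_read, bytes_expected):
        """Complete or abort an NFC-style lease on handle close.

        lease is a hypothetical object with .state, .complete() and .abort().
        """
        if lease.state != 'ready':
            return
        if bytes_read < bytes_expected:
            # Matches "Aborting lease ... due to incomplete transfer."
            lease.abort()
        else:
            lease.complete()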
{{(pid=61852) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 858.656215] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44417087-f34d-460c-bffc-a6f4de44d9c2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.663674] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a26cc05d-f9a8-42a5-920c-5013aa5cfbc7 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lock "89970cff-cb49-4803-81a5-1675b0ea4aaf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.158s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 858.670273] env[61852]: DEBUG oslo_vmware.rw_handles [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523a2f0e-2c8a-9ba6-d8bd-3a8ae9aa3db2/disk-0.vmdk is in state: ready. {{(pid=61852) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 858.670482] env[61852]: ERROR oslo_vmware.rw_handles [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523a2f0e-2c8a-9ba6-d8bd-3a8ae9aa3db2/disk-0.vmdk due to incomplete transfer. [ 858.670745] env[61852]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-482fbd09-0494-459d-903a-4504e10664a4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.678712] env[61852]: DEBUG oslo_vmware.rw_handles [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523a2f0e-2c8a-9ba6-d8bd-3a8ae9aa3db2/disk-0.vmdk. 
{{(pid=61852) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 858.678909] env[61852]: DEBUG nova.virt.vmwareapi.images [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Uploaded image f9405a8f-dc2e-4186-8a5b-95f60d4b7241 to the Glance image server {{(pid=61852) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 858.680603] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Destroying the VM {{(pid=61852) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 858.680707] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-47b50e10-6e55-4869-8cea-899dfaf77ce7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.687080] env[61852]: DEBUG oslo_vmware.api [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){ [ 858.687080] env[61852]: value = "task-1293007" [ 858.687080] env[61852]: _type = "Task" [ 858.687080] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.694654] env[61852]: DEBUG oslo_vmware.api [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1293007, 'name': Destroy_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.722786] env[61852]: DEBUG oslo_vmware.api [req-192d18d4-4bc4-4edb-9ffa-bc2bd641bc0b req-90fdd9c5-93d3-4ed1-bd0b-dc2626159b93 service nova] Task: {'id': task-1292999, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.870835] env[61852]: DEBUG oslo_concurrency.lockutils [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Acquiring lock "21d74604-6a64-44ee-a012-ebff7166853e" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.871222] env[61852]: DEBUG oslo_concurrency.lockutils [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Lock "21d74604-6a64-44ee-a012-ebff7166853e" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.871556] env[61852]: DEBUG oslo_concurrency.lockutils [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Acquiring lock "21d74604-6a64-44ee-a012-ebff7166853e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.871889] env[61852]: DEBUG oslo_concurrency.lockutils [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Lock "21d74604-6a64-44ee-a012-ebff7166853e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.872204] env[61852]: DEBUG oslo_concurrency.lockutils [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Lock "21d74604-6a64-44ee-a012-ebff7166853e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 858.874899] env[61852]: INFO nova.compute.manager [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Terminating instance [ 858.878057] env[61852]: DEBUG nova.compute.manager [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 858.878241] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 858.879212] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f3df80-cd93-4e99-8791-f85953cfde47 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.896843] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 858.897184] env[61852]: DEBUG oslo_vmware.api [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293005, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.901355] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-655f3116-29d6-4ae4-b9a5-410a4e4fda52 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.911126] env[61852]: DEBUG oslo_vmware.api [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Waiting for the task: (returnval){ [ 858.911126] env[61852]: value = "task-1293008" [ 858.911126] env[61852]: _type = "Task" [ 858.911126] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.921695] env[61852]: DEBUG oslo_vmware.api [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Task: {'id': task-1293008, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.958085] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293006, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.138387] env[61852]: DEBUG oslo_concurrency.lockutils [None req-83e1466d-da3f-463b-bbc5-0def3e0a5058 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.140344] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.508s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.142173] env[61852]: DEBUG nova.objects.instance [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lazy-loading 'resources' on Instance uuid d93b8055-1eb2-4368-a051-289dc5a9d0ed {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 859.201092] env[61852]: DEBUG oslo_vmware.api [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1293007, 'name': Destroy_Task} progress is 100%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.224085] env[61852]: DEBUG oslo_vmware.api [req-192d18d4-4bc4-4edb-9ffa-bc2bd641bc0b req-90fdd9c5-93d3-4ed1-bd0b-dc2626159b93 service nova] Task: {'id': task-1292999, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.301140] env[61852]: DEBUG nova.network.neutron [req-df76baea-b3d3-44e7-865c-aedc3ffc8112 req-d6baade6-58ef-4d09-9ecd-594a40c3e84c service nova] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Updated VIF entry in instance network info cache for port 9444dd57-04ba-4f44-8080-68ec800cc9b6. 
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 859.302602] env[61852]: DEBUG nova.network.neutron [req-df76baea-b3d3-44e7-865c-aedc3ffc8112 req-d6baade6-58ef-4d09-9ecd-594a40c3e84c service nova] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Updating instance_info_cache with network_info: [{"id": "9444dd57-04ba-4f44-8080-68ec800cc9b6", "address": "fa:16:3e:95:4d:d6", "network": {"id": "f986fa1f-0449-45a5-86ee-66a7fe44ea49", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-901829409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8dff8d945da948a89ee0fb2e2ddd0f9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9444dd57-04", "ovs_interfaceid": "9444dd57-04ba-4f44-8080-68ec800cc9b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.394891] env[61852]: DEBUG oslo_vmware.api [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293005, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.762322} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.395324] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 254919cb-e3cd-4288-8696-95e632d78a38/254919cb-e3cd-4288-8696-95e632d78a38.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 859.395680] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 859.396046] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cedd42bf-64d8-4674-91f6-6a7833c0e808 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.405047] env[61852]: DEBUG oslo_vmware.api [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 859.405047] env[61852]: value = "task-1293009" [ 859.405047] env[61852]: _type = "Task" [ 859.405047] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.413032] env[61852]: DEBUG oslo_vmware.api [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293009, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.420272] env[61852]: DEBUG oslo_vmware.api [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Task: {'id': task-1293008, 'name': PowerOffVM_Task, 'duration_secs': 0.345465} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.420513] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 859.420690] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 859.420965] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7a814500-dac6-47aa-9d40-a6f30729be67 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.456671] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293006, 'name': CreateVM_Task, 'duration_secs': 0.787346} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.456881] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 859.457639] env[61852]: DEBUG oslo_concurrency.lockutils [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 859.457855] env[61852]: DEBUG oslo_concurrency.lockutils [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.458260] env[61852]: DEBUG oslo_concurrency.lockutils [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 859.458509] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a749d9ea-f603-40f9-8ce3-b38c5f421145 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.463255] env[61852]: DEBUG oslo_vmware.api [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 859.463255] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5254b228-3ccb-eb19-e510-f7f539798c3c" [ 859.463255] env[61852]: _type = "Task" [ 859.463255] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.473504] env[61852]: DEBUG oslo_vmware.api [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5254b228-3ccb-eb19-e510-f7f539798c3c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.499326] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 859.499514] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Deleting contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 859.499700] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Deleting the datastore file [datastore2] 21d74604-6a64-44ee-a012-ebff7166853e {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 859.499958] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-46a28973-0227-43f5-8295-fa8be186f21c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.506618] env[61852]: DEBUG oslo_vmware.api [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Waiting for the task: (returnval){ [ 859.506618] env[61852]: value = "task-1293011" [ 859.506618] env[61852]: _type = "Task" [ 859.506618] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.517414] env[61852]: DEBUG oslo_vmware.api [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Task: {'id': task-1293011, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.704102] env[61852]: DEBUG oslo_vmware.api [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1293007, 'name': Destroy_Task, 'duration_secs': 0.543042} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.704102] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Destroyed the VM [ 859.704409] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Deleting Snapshot of the VM instance {{(pid=61852) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 859.704791] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c564a642-5406-44b7-820a-a0ebfc729acf {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.718154] env[61852]: DEBUG oslo_vmware.api [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){ [ 859.718154] env[61852]: value = "task-1293012" [ 859.718154] env[61852]: _type = "Task" [ 859.718154] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.728992] env[61852]: DEBUG oslo_vmware.api [req-192d18d4-4bc4-4edb-9ffa-bc2bd641bc0b req-90fdd9c5-93d3-4ed1-bd0b-dc2626159b93 service nova] Task: {'id': task-1292999, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.731853] env[61852]: DEBUG oslo_vmware.api [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1293012, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.805242] env[61852]: DEBUG oslo_concurrency.lockutils [req-df76baea-b3d3-44e7-865c-aedc3ffc8112 req-d6baade6-58ef-4d09-9ecd-594a40c3e84c service nova] Releasing lock "refresh_cache-23ff3009-7b13-4d5e-93ed-ca1c3e9127bb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 859.805565] env[61852]: DEBUG nova.compute.manager [req-df76baea-b3d3-44e7-865c-aedc3ffc8112 req-d6baade6-58ef-4d09-9ecd-594a40c3e84c service nova] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Received event network-vif-unplugged-ea60304c-08b8-4035-8ece-fc40b1b508b1 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 859.806236] env[61852]: DEBUG oslo_concurrency.lockutils [req-df76baea-b3d3-44e7-865c-aedc3ffc8112 req-d6baade6-58ef-4d09-9ecd-594a40c3e84c service nova] Acquiring lock "00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.806236] env[61852]: DEBUG oslo_concurrency.lockutils [req-df76baea-b3d3-44e7-865c-aedc3ffc8112 req-d6baade6-58ef-4d09-9ecd-594a40c3e84c service nova] Lock "00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.806236] env[61852]: DEBUG oslo_concurrency.lockutils [req-df76baea-b3d3-44e7-865c-aedc3ffc8112 req-d6baade6-58ef-4d09-9ecd-594a40c3e84c service nova] Lock "00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.806432] env[61852]: DEBUG nova.compute.manager [req-df76baea-b3d3-44e7-865c-aedc3ffc8112 req-d6baade6-58ef-4d09-9ecd-594a40c3e84c service nova] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] No waiting events found dispatching network-vif-unplugged-ea60304c-08b8-4035-8ece-fc40b1b508b1 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 859.806641] env[61852]: WARNING nova.compute.manager [req-df76baea-b3d3-44e7-865c-aedc3ffc8112 req-d6baade6-58ef-4d09-9ecd-594a40c3e84c service nova] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Received unexpected event network-vif-unplugged-ea60304c-08b8-4035-8ece-fc40b1b508b1 for instance with vm_state shelved and task_state shelving_offloading. [ 859.806910] env[61852]: DEBUG nova.compute.manager [req-df76baea-b3d3-44e7-865c-aedc3ffc8112 req-d6baade6-58ef-4d09-9ecd-594a40c3e84c service nova] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Received event network-changed-ea60304c-08b8-4035-8ece-fc40b1b508b1 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 859.806991] env[61852]: DEBUG nova.compute.manager [req-df76baea-b3d3-44e7-865c-aedc3ffc8112 req-d6baade6-58ef-4d09-9ecd-594a40c3e84c service nova] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Refreshing instance network info cache due to event network-changed-ea60304c-08b8-4035-8ece-fc40b1b508b1. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 859.807225] env[61852]: DEBUG oslo_concurrency.lockutils [req-df76baea-b3d3-44e7-865c-aedc3ffc8112 req-d6baade6-58ef-4d09-9ecd-594a40c3e84c service nova] Acquiring lock "refresh_cache-00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 859.807401] env[61852]: DEBUG oslo_concurrency.lockutils [req-df76baea-b3d3-44e7-865c-aedc3ffc8112 req-d6baade6-58ef-4d09-9ecd-594a40c3e84c service nova] Acquired lock "refresh_cache-00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.807554] env[61852]: DEBUG nova.network.neutron [req-df76baea-b3d3-44e7-865c-aedc3ffc8112 req-d6baade6-58ef-4d09-9ecd-594a40c3e84c service nova] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Refreshing network info cache for port ea60304c-08b8-4035-8ece-fc40b1b508b1 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 859.916758] env[61852]: DEBUG oslo_concurrency.lockutils [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquiring lock "12e431d3-4c23-4f4c-a619-f0b69a0e31e8" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.917017] env[61852]: DEBUG oslo_concurrency.lockutils [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lock "12e431d3-4c23-4f4c-a619-f0b69a0e31e8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.923953] env[61852]: DEBUG oslo_vmware.api [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293009, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061871} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.924706] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 859.925835] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffda3ef6-4954-4733-8281-b945e6763549 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.950499] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] 254919cb-e3cd-4288-8696-95e632d78a38/254919cb-e3cd-4288-8696-95e632d78a38.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 859.953664] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ad3f81f-f7d9-4979-967c-74e732258cfd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.978532] env[61852]: DEBUG oslo_vmware.api [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5254b228-3ccb-eb19-e510-f7f539798c3c, 'name': SearchDatastore_Task, 'duration_secs': 0.009764} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.982213] env[61852]: DEBUG oslo_concurrency.lockutils [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 859.982725] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 859.982725] env[61852]: DEBUG oslo_concurrency.lockutils [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 859.982890] env[61852]: DEBUG oslo_concurrency.lockutils [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.983048] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 859.983353] env[61852]: DEBUG oslo_vmware.api [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 859.983353] env[61852]: value = "task-1293013" [ 859.983353] env[61852]: _type = "Task" [ 859.983353] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.984022] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b6356055-4cd8-40ef-936c-11a3300f5b3c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.994056] env[61852]: DEBUG oslo_vmware.api [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293013, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.995588] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 859.995781] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 859.997447] env[61852]: DEBUG oslo_concurrency.lockutils [None req-052a34bc-65a6-418a-bb1e-4f8ada420b51 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.997676] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e42413bb-d457-468e-b8e6-50208cb89f3a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.000506] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d6c89f-3bbd-4f95-b427-58bea2210568 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.013618] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6d02bd0-56c1-41d9-890f-1d4110e23342 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.016990] env[61852]: DEBUG oslo_vmware.api [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 860.016990] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52dabe5b-c5d0-017b-29a5-15d9ff8b95cd" [ 860.016990] env[61852]: _type = "Task" [ 860.016990] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.048137] env[61852]: DEBUG oslo_vmware.api [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Task: {'id': task-1293011, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152717} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.049363] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d67b8b9b-8fff-4c60-ac0e-b1b44b876db3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.051869] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 860.052109] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Deleted contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 860.052306] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 860.052488] env[61852]: INFO nova.compute.manager [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Took 1.17 seconds to destroy the instance on the hypervisor. [ 860.052737] env[61852]: DEBUG oslo.service.loopingcall [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 860.056303] env[61852]: DEBUG nova.compute.manager [-] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 860.056383] env[61852]: DEBUG nova.network.neutron [-] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 860.058044] env[61852]: DEBUG oslo_vmware.api [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52dabe5b-c5d0-017b-29a5-15d9ff8b95cd, 'name': SearchDatastore_Task, 'duration_secs': 0.009127} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.059509] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-363b9b9d-92ae-4f2d-942b-1f4ad63387b4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.065128] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd1f9c08-94f8-4f73-a214-3c2451be9c7a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.070331] env[61852]: DEBUG oslo_vmware.api [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 860.070331] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52644de1-6933-007d-4691-ec4f884bbed4" [ 860.070331] env[61852]: _type = "Task" [ 860.070331] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.082161] env[61852]: DEBUG nova.compute.provider_tree [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 860.089318] env[61852]: DEBUG oslo_vmware.api [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52644de1-6933-007d-4691-ec4f884bbed4, 'name': SearchDatastore_Task, 'duration_secs': 0.012774} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.089953] env[61852]: DEBUG oslo_concurrency.lockutils [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 860.089953] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb/23ff3009-7b13-4d5e-93ed-ca1c3e9127bb.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 860.090307] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-199181e3-fef5-4e66-b159-b115adea0b4c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.097103] env[61852]: DEBUG oslo_vmware.api [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 860.097103] env[61852]: value = "task-1293014" [ 860.097103] env[61852]: _type = "Task" [ 860.097103] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.104844] env[61852]: DEBUG oslo_vmware.api [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293014, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.233810] env[61852]: DEBUG oslo_vmware.api [req-192d18d4-4bc4-4edb-9ffa-bc2bd641bc0b req-90fdd9c5-93d3-4ed1-bd0b-dc2626159b93 service nova] Task: {'id': task-1292999, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.237758] env[61852]: DEBUG oslo_vmware.api [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1293012, 'name': RemoveSnapshot_Task} progress is 50%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.425314] env[61852]: DEBUG nova.compute.manager [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 860.499222] env[61852]: DEBUG oslo_vmware.api [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293013, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.585557] env[61852]: DEBUG nova.scheduler.client.report [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 860.608085] env[61852]: DEBUG oslo_vmware.api [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293014, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.687352] env[61852]: DEBUG nova.compute.manager [req-4d48d072-00c9-4888-b2e7-19d1baec7ccc req-2495a414-5418-476b-ab77-567fdf941414 service nova] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Received event network-vif-deleted-3d08b2a9-48bc-4f9f-bf97-a408e60853b0 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 860.687692] env[61852]: INFO nova.compute.manager [req-4d48d072-00c9-4888-b2e7-19d1baec7ccc req-2495a414-5418-476b-ab77-567fdf941414 service nova] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Neutron deleted interface 3d08b2a9-48bc-4f9f-bf97-a408e60853b0; detaching it from the instance and deleting it from the info cache [ 860.687917] env[61852]: DEBUG nova.network.neutron [req-4d48d072-00c9-4888-b2e7-19d1baec7ccc req-2495a414-5418-476b-ab77-567fdf941414 service nova] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.689978] env[61852]: DEBUG nova.network.neutron [req-df76baea-b3d3-44e7-865c-aedc3ffc8112 req-d6baade6-58ef-4d09-9ecd-594a40c3e84c service nova] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Updated VIF entry in instance network info cache for port ea60304c-08b8-4035-8ece-fc40b1b508b1. 
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 860.691061] env[61852]: DEBUG nova.network.neutron [req-df76baea-b3d3-44e7-865c-aedc3ffc8112 req-d6baade6-58ef-4d09-9ecd-594a40c3e84c service nova] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Updating instance_info_cache with network_info: [{"id": "ea60304c-08b8-4035-8ece-fc40b1b508b1", "address": "fa:16:3e:8c:e4:12", "network": {"id": "240e5d63-b796-4cef-9d1f-5d8f8868dea4", "bridge": null, "label": "tempest-DeleteServersTestJSON-1472329620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdac3605118e44a69d44ab56cafe2e21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapea60304c-08", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.737778] env[61852]: DEBUG oslo_vmware.api [req-192d18d4-4bc4-4edb-9ffa-bc2bd641bc0b req-90fdd9c5-93d3-4ed1-bd0b-dc2626159b93 service nova] Task: {'id': task-1292999, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.740785] env[61852]: DEBUG oslo_vmware.api [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1293012, 'name': RemoveSnapshot_Task, 'duration_secs': 0.843669} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.741180] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Deleted Snapshot of the VM instance {{(pid=61852) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 860.741488] env[61852]: INFO nova.compute.manager [None req-ffbc03d7-9cdb-4ee2-98b2-9546393f487d tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Took 16.24 seconds to snapshot the instance on the hypervisor. [ 860.949265] env[61852]: DEBUG oslo_concurrency.lockutils [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.996732] env[61852]: DEBUG oslo_vmware.api [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293013, 'name': ReconfigVM_Task, 'duration_secs': 0.88632} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.997056] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Reconfigured VM instance instance-0000004a to attach disk [datastore1] 254919cb-e3cd-4288-8696-95e632d78a38/254919cb-e3cd-4288-8696-95e632d78a38.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 860.997678] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3163dd6e-5c86-4d0e-b47f-11f312d2cfaf {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.003677] env[61852]: DEBUG oslo_vmware.api [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 861.003677] env[61852]: value = "task-1293015" [ 861.003677] env[61852]: _type = "Task" [ 861.003677] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.012309] env[61852]: DEBUG oslo_vmware.api [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293015, 'name': Rename_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.037159] env[61852]: DEBUG nova.network.neutron [-] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.090780] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.950s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.093016] env[61852]: DEBUG oslo_concurrency.lockutils [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.490s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.096570] env[61852]: INFO nova.compute.claims [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 861.108393] env[61852]: DEBUG oslo_vmware.api [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293014, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.110728] env[61852]: INFO nova.scheduler.client.report [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Deleted allocations for instance d93b8055-1eb2-4368-a051-289dc5a9d0ed [ 861.193295] env[61852]: DEBUG oslo_concurrency.lockutils [req-df76baea-b3d3-44e7-865c-aedc3ffc8112 req-d6baade6-58ef-4d09-9ecd-594a40c3e84c service nova] Releasing lock "refresh_cache-00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 861.194066] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bfa62fd8-d9ad-4935-b16a-707ae8e19dfc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.203113] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d87affb9-9194-4464-af90-b6c5ba6c2353 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.225818] env[61852]: DEBUG oslo_vmware.api [req-192d18d4-4bc4-4edb-9ffa-bc2bd641bc0b req-90fdd9c5-93d3-4ed1-bd0b-dc2626159b93 service nova] Task: {'id': task-1292999, 'name': ReconfigVM_Task} progress is 18%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.233887] env[61852]: DEBUG nova.compute.manager [req-4d48d072-00c9-4888-b2e7-19d1baec7ccc req-2495a414-5418-476b-ab77-567fdf941414 service nova] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Detach interface failed, port_id=3d08b2a9-48bc-4f9f-bf97-a408e60853b0, reason: Instance 21d74604-6a64-44ee-a012-ebff7166853e could not be found. 
{{(pid=61852) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 861.378068] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquiring lock "f8ebb1b7-39c6-486e-ab25-23080d858846" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.378365] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lock "f8ebb1b7-39c6-486e-ab25-23080d858846" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.378585] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquiring lock "f8ebb1b7-39c6-486e-ab25-23080d858846-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.379303] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lock "f8ebb1b7-39c6-486e-ab25-23080d858846-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.379303] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lock "f8ebb1b7-39c6-486e-ab25-23080d858846-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.383385] env[61852]: INFO nova.compute.manager [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Terminating instance [ 861.386038] env[61852]: DEBUG nova.compute.manager [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 861.386038] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 861.387042] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed85368a-7509-4e93-8f85-65f120a20e8d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.394445] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 861.394653] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8f93d234-163f-4181-b233-491a2301f7e3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.405545] env[61852]: DEBUG oslo_vmware.api [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for the task: (returnval){ [ 861.405545] env[61852]: value = "task-1293016" [ 861.405545] env[61852]: _type = "Task" [ 861.405545] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.412992] env[61852]: DEBUG oslo_vmware.api [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1293016, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.441742] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquiring lock "883a0d5a-f775-4ffc-abf0-921d0ea6cc8c" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.442084] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lock "883a0d5a-f775-4ffc-abf0-921d0ea6cc8c" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.442314] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquiring lock "883a0d5a-f775-4ffc-abf0-921d0ea6cc8c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.442518] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lock "883a0d5a-f775-4ffc-abf0-921d0ea6cc8c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.442711] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lock "883a0d5a-f775-4ffc-abf0-921d0ea6cc8c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.445049] env[61852]: INFO nova.compute.manager [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Terminating instance [ 861.447063] env[61852]: DEBUG nova.compute.manager [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 861.447287] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 861.448134] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8a43230-e357-42d3-8dfd-319a1f36eee9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.455672] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 861.455672] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7a60d36a-edb6-4e01-a733-98cac5c539f0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.462035] env[61852]: DEBUG oslo_vmware.api [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for the task: (returnval){ [ 861.462035] env[61852]: value = "task-1293017" [ 861.462035] env[61852]: _type = "Task" [ 861.462035] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.472346] env[61852]: DEBUG oslo_vmware.api [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1293017, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.513907] env[61852]: DEBUG oslo_vmware.api [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293015, 'name': Rename_Task, 'duration_secs': 0.454199} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.514480] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 861.514480] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-61a2d30a-af5b-4f0f-9bee-fd0d0baf624d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.521349] env[61852]: DEBUG oslo_vmware.api [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 861.521349] env[61852]: value = "task-1293018" [ 861.521349] env[61852]: _type = "Task" [ 861.521349] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.529968] env[61852]: DEBUG oslo_vmware.api [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293018, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.540079] env[61852]: INFO nova.compute.manager [-] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Took 1.48 seconds to deallocate network for instance. [ 861.614062] env[61852]: DEBUG oslo_vmware.api [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293014, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.618914] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9a90b361-3447-4d5d-a9d9-58510e3dbb58 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "d93b8055-1eb2-4368-a051-289dc5a9d0ed" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 17.110s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.728424] env[61852]: DEBUG oslo_vmware.api [req-192d18d4-4bc4-4edb-9ffa-bc2bd641bc0b req-90fdd9c5-93d3-4ed1-bd0b-dc2626159b93 service nova] Task: {'id': task-1292999, 'name': ReconfigVM_Task, 'duration_secs': 5.891923} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.728783] env[61852]: DEBUG oslo_concurrency.lockutils [req-192d18d4-4bc4-4edb-9ffa-bc2bd641bc0b req-90fdd9c5-93d3-4ed1-bd0b-dc2626159b93 service nova] Releasing lock "d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 861.729083] env[61852]: DEBUG nova.virt.vmwareapi.vmops [req-192d18d4-4bc4-4edb-9ffa-bc2bd641bc0b req-90fdd9c5-93d3-4ed1-bd0b-dc2626159b93 service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Reconfigured VM to detach interface {{(pid=61852) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 861.729686] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "d3922357-383f-4f7e-9c76-4eb688a092b9" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 6.106s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.729950] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "d3922357-383f-4f7e-9c76-4eb688a092b9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.730276] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock 
"d3922357-383f-4f7e-9c76-4eb688a092b9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.730501] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "d3922357-383f-4f7e-9c76-4eb688a092b9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.733141] env[61852]: INFO nova.compute.manager [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Terminating instance [ 861.735977] env[61852]: DEBUG nova.compute.manager [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 861.736334] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 861.737232] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60179286-1e2e-4237-9714-a59e3ad0060c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.744854] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 861.745450] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3f15c004-beb5-449b-9979-b3921f3dc2fa {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.752396] env[61852]: DEBUG oslo_vmware.api [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 861.752396] env[61852]: value = "task-1293019" [ 861.752396] env[61852]: _type = "Task" [ 861.752396] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.764680] env[61852]: DEBUG oslo_vmware.api [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293019, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.914154] env[61852]: DEBUG oslo_vmware.api [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1293016, 'name': PowerOffVM_Task, 'duration_secs': 0.210391} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.914490] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 861.914684] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 861.914990] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7baa2188-4869-4e14-9a87-2f6081127531 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.972119] env[61852]: DEBUG oslo_vmware.api [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1293017, 'name': PowerOffVM_Task, 'duration_secs': 0.190749} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.972518] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 861.972834] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 861.973642] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a3f56fb4-9743-4cd2-a30b-bd62c273902a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.984226] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 861.984474] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Deleting contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 861.984663] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Deleting the datastore file [datastore2] f8ebb1b7-39c6-486e-ab25-23080d858846 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 861.985358] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b7c1d3e5-96f4-43c4-a13c-c2adec03cf78 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.991697] env[61852]: DEBUG oslo_vmware.api [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for the task: (returnval){ [ 861.991697] env[61852]: value = "task-1293022" [ 861.991697] env[61852]: _type = "Task" [ 861.991697] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.000833] env[61852]: DEBUG oslo_vmware.api [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1293022, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.031745] env[61852]: DEBUG oslo_vmware.api [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293018, 'name': PowerOnVM_Task} progress is 94%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.037304] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 862.037553] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Deleting contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 862.037817] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Deleting the datastore file [datastore2] 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 862.039063] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d6a12d90-cf0f-4acb-aabb-678c06bc6d91 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.044417] env[61852]: DEBUG oslo_vmware.api [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for the task: (returnval){ [ 862.044417] env[61852]: value = "task-1293023" [ 862.044417] env[61852]: _type = "Task" [ 862.044417] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.053540] env[61852]: DEBUG oslo_concurrency.lockutils [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.054188] env[61852]: DEBUG oslo_vmware.api [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1293023, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.112092] env[61852]: DEBUG oslo_vmware.api [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293014, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.517808} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.113468] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb/23ff3009-7b13-4d5e-93ed-ca1c3e9127bb.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 862.113468] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 862.113468] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0f983e1d-663e-4a0a-b284-45a0737458e1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.119979] env[61852]: DEBUG oslo_vmware.api [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 862.119979] env[61852]: value = "task-1293024" [ 862.119979] env[61852]: _type = "Task" [ 862.119979] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.130454] env[61852]: DEBUG oslo_vmware.api [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293024, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.261926] env[61852]: DEBUG oslo_vmware.api [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293019, 'name': PowerOffVM_Task, 'duration_secs': 0.220777} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.264486] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 862.264666] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 862.265962] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5067c45e-9ebf-4916-91e2-c9ee562a404a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.335707] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 862.336350] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 862.336635] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Deleting the datastore file [datastore1] d3922357-383f-4f7e-9c76-4eb688a092b9 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 862.337567] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8bc7e9b4-956a-475d-bd86-b320e862c753 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.354938] env[61852]: DEBUG oslo_vmware.api [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 862.354938] env[61852]: value = "task-1293026" [ 862.354938] env[61852]: _type = "Task" [ 862.354938] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.355362] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "b44c9cc0-3f2b-495a-87ee-f03de8dcec3c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.355671] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "b44c9cc0-3f2b-495a-87ee-f03de8dcec3c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.369965] env[61852]: DEBUG oslo_vmware.api [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293026, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.392074] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d436fa3b-33a3-43bb-a0b8-bc91474df3ee {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.400385] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc2b0852-b40a-4393-90ad-fd19c5000f1d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.435556] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dfbbd5e-10b4-4105-a5c5-d3189b495f0e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.444078] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b31553cc-be59-4092-92bd-9f50958a6d7e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.457855] env[61852]: DEBUG nova.compute.provider_tree [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 862.502427] env[61852]: DEBUG oslo_vmware.api [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1293022, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160008} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.502726] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 862.502909] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Deleted contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 862.503187] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 862.503347] env[61852]: INFO nova.compute.manager [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Took 1.12 seconds to destroy the instance on the hypervisor. [ 862.503593] env[61852]: DEBUG oslo.service.loopingcall [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 862.503787] env[61852]: DEBUG nova.compute.manager [-] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 862.503882] env[61852]: DEBUG nova.network.neutron [-] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 862.532152] env[61852]: DEBUG oslo_vmware.api [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293018, 'name': PowerOnVM_Task, 'duration_secs': 0.581719} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.532441] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 862.532647] env[61852]: INFO nova.compute.manager [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Took 8.72 seconds to spawn the instance on the hypervisor. 
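[editor's note] The "Invoking VirtualMachine.PowerOnVM_Task ...", "Task: {...} progress is N%.", and "... completed successfully." entries above all come from one oslo.vmware pattern: the client starts an asynchronous vSphere task, and wait_for_task then polls it until it reaches a terminal state. A minimal sketch of that pattern follows; the vCenter endpoint, credentials, and instance UUID are placeholders (not values from this log), and error handling is omitted.

    from oslo_vmware import api

    # Placeholder vCenter endpoint and credentials (assumptions, not from the log).
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # Look up the VM by its OpenStack instance UUID, roughly what Nova's
    # vm_util does via SearchIndex.FindAllByUuid; assumes at least one match.
    vm_refs = session.invoke_api(session.vim, 'FindAllByUuid',
                                 session.vim.service_content.searchIndex,
                                 uuid='00000000-0000-0000-0000-000000000000',
                                 vmSearch=True, instanceUuid=True)

    # Start the asynchronous power-on task; wait_for_task then polls the task
    # object, which is what emits the "progress is N%" lines seen in this log.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_refs[0])
    session.wait_for_task(task)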
[ 862.532936] env[61852]: DEBUG nova.compute.manager [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 862.533674] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-834adeac-5c5e-4f33-8e3d-488e55ff7ca5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.555595] env[61852]: DEBUG oslo_vmware.api [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Task: {'id': task-1293023, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161123} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.556011] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 862.556304] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Deleted contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 862.556584] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 862.556847] env[61852]: INFO nova.compute.manager [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Took 1.11 seconds to destroy the instance on the hypervisor. [ 862.557219] env[61852]: DEBUG oslo.service.loopingcall [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 862.557497] env[61852]: DEBUG nova.compute.manager [-] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 862.557630] env[61852]: DEBUG nova.network.neutron [-] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 862.631901] env[61852]: DEBUG oslo_vmware.api [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293024, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064902} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.632272] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 862.633065] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e0138fc-adf5-48d4-8e3f-9b6eafb3eff0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.656088] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb/23ff3009-7b13-4d5e-93ed-ca1c3e9127bb.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 862.657379] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ed704ae-094c-4cb8-81d8-d97d2504d731 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.684588] env[61852]: DEBUG oslo_vmware.api [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 862.684588] env[61852]: value = "task-1293027" [ 862.684588] env[61852]: _type = "Task" [ 862.684588] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.693799] env[61852]: DEBUG oslo_vmware.api [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293027, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.806161] env[61852]: DEBUG nova.compute.manager [req-0548dca9-5e25-45bd-9ccc-8fbe2682c4ef req-c6418997-f628-4a28-9844-596f60a52b55 service nova] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Received event network-vif-deleted-d1e216a2-48ce-4945-8024-f78b3701fd65 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 862.806423] env[61852]: INFO nova.compute.manager [req-0548dca9-5e25-45bd-9ccc-8fbe2682c4ef req-c6418997-f628-4a28-9844-596f60a52b55 service nova] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Neutron deleted interface d1e216a2-48ce-4945-8024-f78b3701fd65; detaching it from the instance and deleting it from the info cache [ 862.806539] env[61852]: DEBUG nova.network.neutron [req-0548dca9-5e25-45bd-9ccc-8fbe2682c4ef req-c6418997-f628-4a28-9844-596f60a52b55 service nova] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.824860] env[61852]: DEBUG nova.compute.manager [req-6f99d4d5-c32f-4788-838d-92c2f83c65d3 req-35c468d2-6655-4a9a-9c2c-515e43dbb53f service nova] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Received event network-vif-deleted-33def83c-31aa-4bb0-9af4-8c7657457d6f {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 862.825109] env[61852]: INFO nova.compute.manager [req-6f99d4d5-c32f-4788-838d-92c2f83c65d3 req-35c468d2-6655-4a9a-9c2c-515e43dbb53f service nova] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Neutron deleted interface 33def83c-31aa-4bb0-9af4-8c7657457d6f; detaching it from the instance and deleting it from the info cache [ 862.825309] env[61852]: DEBUG nova.network.neutron [req-6f99d4d5-c32f-4788-838d-92c2f83c65d3 req-35c468d2-6655-4a9a-9c2c-515e43dbb53f service nova] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.862036] env[61852]: DEBUG nova.compute.manager [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 862.870433] env[61852]: DEBUG oslo_vmware.api [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293026, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158722} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.870696] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 862.870882] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 862.871077] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 862.871261] env[61852]: INFO nova.compute.manager [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Took 1.14 seconds to destroy the instance on the hypervisor. [ 862.871504] env[61852]: DEBUG oslo.service.loopingcall [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 862.871695] env[61852]: DEBUG nova.compute.manager [-] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 862.871815] env[61852]: DEBUG nova.network.neutron [-] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 862.957045] env[61852]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port 669836ae-c7e6-440f-b9bf-84b0d95a595e could not be found.", "detail": ""}} {{(pid=61852) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 862.957045] env[61852]: DEBUG nova.network.neutron [-] Unable to show port 669836ae-c7e6-440f-b9bf-84b0d95a595e as it no longer exists. 
{{(pid=61852) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 862.960465] env[61852]: DEBUG nova.scheduler.client.report [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 863.052269] env[61852]: INFO nova.compute.manager [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Took 29.74 seconds to build instance. [ 863.180662] env[61852]: DEBUG oslo_concurrency.lockutils [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Acquiring lock "8897a654-6805-45b0-b12b-16f7981d33ad" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.180662] env[61852]: DEBUG oslo_concurrency.lockutils [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Lock "8897a654-6805-45b0-b12b-16f7981d33ad" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.180662] env[61852]: DEBUG oslo_concurrency.lockutils [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Acquiring lock "8897a654-6805-45b0-b12b-16f7981d33ad-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.181717] env[61852]: DEBUG oslo_concurrency.lockutils [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Lock "8897a654-6805-45b0-b12b-16f7981d33ad-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.181717] env[61852]: DEBUG oslo_concurrency.lockutils [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Lock "8897a654-6805-45b0-b12b-16f7981d33ad-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.183694] env[61852]: INFO nova.compute.manager [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee 
tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Terminating instance [ 863.188658] env[61852]: DEBUG nova.compute.manager [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 863.188863] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 863.189639] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc52d8b6-d00f-444f-9754-c00fcfafec1b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.197163] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 863.199688] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-41a3ab25-4c2d-44b1-a56d-966b05321230 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.201447] env[61852]: DEBUG oslo_vmware.api [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293027, 'name': ReconfigVM_Task, 'duration_secs': 0.251808} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.201889] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb/23ff3009-7b13-4d5e-93ed-ca1c3e9127bb.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 863.202778] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e3ff1a3f-a737-4ab4-bdbb-3dea8eaab5c0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.207283] env[61852]: DEBUG oslo_vmware.api [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){ [ 863.207283] env[61852]: value = "task-1293028" [ 863.207283] env[61852]: _type = "Task" [ 863.207283] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.211156] env[61852]: DEBUG oslo_vmware.api [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 863.211156] env[61852]: value = "task-1293029" [ 863.211156] env[61852]: _type = "Task" [ 863.211156] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.219015] env[61852]: DEBUG oslo_vmware.api [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1293028, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.222326] env[61852]: DEBUG oslo_vmware.api [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293029, 'name': Rename_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.276844] env[61852]: DEBUG nova.network.neutron [-] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.304283] env[61852]: DEBUG nova.network.neutron [-] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.310025] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9c9ab615-a8ad-4f98-b606-47f2523ff131 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.323561] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-651281d0-4b5b-4b7f-a8a1-2baa1d1b7c58 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.343304] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f88b4db3-ca89-49fd-a305-824c629c782d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.361857] env[61852]: DEBUG nova.compute.manager [req-0548dca9-5e25-45bd-9ccc-8fbe2682c4ef req-c6418997-f628-4a28-9844-596f60a52b55 service nova] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Detach interface failed, port_id=d1e216a2-48ce-4945-8024-f78b3701fd65, reason: Instance f8ebb1b7-39c6-486e-ab25-23080d858846 could not be found. 
{{(pid=61852) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 863.365558] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e4b2dd3-8a37-47f9-9ad0-06e976f3117e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.393868] env[61852]: DEBUG nova.compute.manager [req-6f99d4d5-c32f-4788-838d-92c2f83c65d3 req-35c468d2-6655-4a9a-9c2c-515e43dbb53f service nova] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Detach interface failed, port_id=33def83c-31aa-4bb0-9af4-8c7657457d6f, reason: Instance 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c could not be found. {{(pid=61852) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 863.395721] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.471768] env[61852]: DEBUG oslo_concurrency.lockutils [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.379s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.472437] env[61852]: DEBUG nova.compute.manager [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 863.475675] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.541s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.475675] env[61852]: DEBUG nova.objects.instance [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lazy-loading 'resources' on Instance uuid b0f8f7dd-e559-43be-b541-c3da48a07d68 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 863.554442] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6f727ad2-6044-4246-9e0f-3472172d4db8 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "254919cb-e3cd-4288-8696-95e632d78a38" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 43.980s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.723749] env[61852]: DEBUG oslo_vmware.api [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1293028, 'name': PowerOffVM_Task, 'duration_secs': 0.185272} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.723749] env[61852]: DEBUG oslo_vmware.api [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293029, 'name': Rename_Task, 'duration_secs': 0.170478} completed successfully.
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.723749] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 863.724358] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 863.724775] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 863.725177] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1233e778-ec54-4635-ae6b-9e2d75ea63c7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.728068] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5240d408-6368-407a-8268-9410ba84dd56 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.733591] env[61852]: DEBUG oslo_vmware.api [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 863.733591] env[61852]: value = "task-1293031" [ 863.733591] env[61852]: _type = "Task" [ 863.733591] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.747187] env[61852]: DEBUG oslo_vmware.api [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293031, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.783041] env[61852]: INFO nova.compute.manager [-] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Took 1.28 seconds to deallocate network for instance. 
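The wait_for_task / _poll_task pairs above (task-1293028 through task-1293031) follow oslo_vmware's poll-until-terminal loop: log the (returnval) task handle, poll progress at a fixed interval, stop on success or error. A minimal stdlib-only Python sketch of that control flow; TaskInfo and get_task_info are hypothetical stand-ins for the vSphere task objects, not oslo_vmware's actual API:

import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str      # "running", "success" or "error"
    progress: int   # percent complete, as in the "progress is N%" lines

def wait_for_task(get_task_info, task_id, interval=0.5):
    # Poll until the task reaches a terminal state, logging progress the
    # way the _poll_task lines above do (a sketch, not oslo_vmware itself).
    start = time.monotonic()
    while True:
        info = get_task_info(task_id)
        if info.state == "success":
            print("Task: {'id': %s} completed successfully (duration_secs: %.6f)"
                  % (task_id, time.monotonic() - start))
            return info
        if info.state == "error":
            raise RuntimeError("Task %s failed" % task_id)
        print("Task: {'id': %s} progress is %d%%." % (task_id, info.progress))
        time.sleep(interval)

# Example run against a canned sequence of task states.
_states = iter([TaskInfo("running", 0), TaskInfo("running", 5),
                TaskInfo("success", 100)])
wait_for_task(lambda _id: next(_states), "task-1293029", interval=0.01)

The real client re-reads task state through the SOAP API on each pass; the sketch keeps only the loop shape visible in the log.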
[ 863.793332] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 863.793796] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 863.794198] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Deleting the datastore file [datastore1] 8897a654-6805-45b0-b12b-16f7981d33ad {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 863.794388] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1b459ced-4d60-42ac-a17f-46e304ec179e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.800935] env[61852]: DEBUG oslo_vmware.api [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){ [ 863.800935] env[61852]: value = "task-1293032" [ 863.800935] env[61852]: _type = "Task" [ 863.800935] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.809121] env[61852]: INFO nova.compute.manager [-] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Took 1.25 seconds to deallocate network for instance. [ 863.814411] env[61852]: DEBUG oslo_vmware.api [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1293032, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.921408] env[61852]: DEBUG nova.network.neutron [-] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.977987] env[61852]: DEBUG nova.compute.utils [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 863.979412] env[61852]: DEBUG nova.compute.manager [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 863.979577] env[61852]: DEBUG nova.network.neutron [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 864.042433] env[61852]: DEBUG nova.policy [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eeca45e07f5b41e38b9ab8ac31bad06c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14a017ea2b084ae0ad2994dda7809c7c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 864.251077] env[61852]: DEBUG oslo_vmware.api [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293031, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.290362] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805d603d-7a86-4ee2-a3fd-7d318019e18a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.294328] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.301036] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4cea28d-8525-4c8f-9937-6908942f5c6c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.337553] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.339892] env[61852]: DEBUG nova.network.neutron [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Successfully created port: 36d2d052-6d80-4ad5-bb1b-2c54679bd05b {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 864.345413] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2ad283c-4e67-4174-b3d2-ad0d94f924f9 {{(pid=61852) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.348050] env[61852]: DEBUG oslo_vmware.api [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1293032, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.204814} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.349450] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 864.349450] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 864.349588] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 864.350290] env[61852]: INFO nova.compute.manager [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Took 1.16 seconds to destroy the instance on the hypervisor. [ 864.350290] env[61852]: DEBUG oslo.service.loopingcall [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 864.351977] env[61852]: DEBUG nova.compute.manager [-] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 864.351977] env[61852]: DEBUG nova.network.neutron [-] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 864.356303] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bd5130b-9b7b-47e4-992f-3d7989faefec {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.372013] env[61852]: DEBUG nova.compute.provider_tree [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 864.423715] env[61852]: INFO nova.compute.manager [-] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Took 1.55 seconds to deallocate network for instance.
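The oslo.service.loopingcall entry above waits on _deallocate_network_with_retries, i.e. network deallocation wrapped in a retrying call. A stdlib-only sketch of that fixed-interval retry shape; call_with_retries, its attempt count and interval are illustrative assumptions, not Nova's actual settings:

import time

def call_with_retries(func, max_attempts=3, interval=1.0):
    # Fixed-interval retry loop; Nova runs the real helper under
    # oslo.service's looping-call machinery, this keeps only the shape.
    for attempt in range(1, max_attempts + 1):
        try:
            return func()
        except Exception as exc:  # the real code narrows this to API errors
            if attempt == max_attempts:
                raise
            print("attempt %d failed (%s); retrying in %.1fs"
                  % (attempt, exc, interval))
            time.sleep(interval)

# Example: a deallocation that fails once before succeeding.
attempts = {"n": 0}
def flaky_deallocate():
    attempts["n"] += 1
    if attempts["n"] < 2:
        raise ConnectionError("neutron unreachable")
    print("deallocate_for_instance() succeeded on attempt %d" % attempts["n"])

call_with_retries(flaky_deallocate, max_attempts=3, interval=0.01)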
[ 864.483104] env[61852]: DEBUG nova.compute.manager [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 864.751355] env[61852]: DEBUG oslo_vmware.api [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293031, 'name': PowerOnVM_Task, 'duration_secs': 0.518148} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.751680] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 864.751963] env[61852]: INFO nova.compute.manager [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Took 8.48 seconds to spawn the instance on the hypervisor. [ 864.752222] env[61852]: DEBUG nova.compute.manager [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 864.753238] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b861806-2d01-4b8b-a798-f5085a5fe11a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.836421] env[61852]: DEBUG nova.compute.manager [req-1369cc11-4fe6-4926-92e6-2a00627a77e4 req-8fddc128-21a8-4898-8f11-8239be00aec7 service nova] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Received event network-vif-deleted-9a926e1c-a6f1-408c-84f3-dfb08cb0464c {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 864.836642] env[61852]: INFO nova.compute.manager [req-1369cc11-4fe6-4926-92e6-2a00627a77e4 req-8fddc128-21a8-4898-8f11-8239be00aec7 service nova] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Neutron deleted interface 9a926e1c-a6f1-408c-84f3-dfb08cb0464c; detaching it from the instance and deleting it from the info cache [ 864.837017] env[61852]: DEBUG nova.network.neutron [req-1369cc11-4fe6-4926-92e6-2a00627a77e4 req-8fddc128-21a8-4898-8f11-8239be00aec7 service nova] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.846915] env[61852]: DEBUG nova.compute.manager [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 864.846915] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdec33cf-0d76-45ac-8454-bd4f2e009c95 
{{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.851792] env[61852]: DEBUG nova.compute.manager [req-a88b5907-84cc-4957-b4a4-5544d68a0c5e req-bbcc8613-5c64-4b0f-8d55-7509d8c8fd1a service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Received event network-vif-deleted-9e5204e6-6870-43d3-986f-9ca080104e14 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 864.875787] env[61852]: DEBUG nova.scheduler.client.report [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 864.930811] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.119324] env[61852]: DEBUG nova.network.neutron [-] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.272946] env[61852]: INFO nova.compute.manager [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Took 28.87 seconds to build instance. 
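The "Inventory has not changed for provider ... based on inventory data" line above comes from the report client comparing the freshly computed inventory against its cached copy and skipping the placement update when they match. A simplified sketch of that comparison, reusing the provider UUID and numbers from the log; the module-level cache and the early-return helper are stand-ins, not the real report client:

# Cache of the last inventory successfully reported, keyed by provider UUID.
_cached_inventory = {}

def set_inventory_for_provider(provider_uuid, inventory):
    # Skip the update when nothing changed, as the log line reports.
    if _cached_inventory.get(provider_uuid) == inventory:
        print("Inventory has not changed for provider %s" % provider_uuid)
        return False
    _cached_inventory[provider_uuid] = inventory  # real code PUTs to placement here
    return True

inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138,
                'step_size': 1, 'allocation_ratio': 1.0},
}
set_inventory_for_provider('f818062c-7b17-4bd0-94af-192a674543c3', inventory)  # first write
set_inventory_for_provider('f818062c-7b17-4bd0-94af-192a674543c3', inventory)  # "has not changed"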
[ 865.339883] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0b350f9f-7a61-4b77-8817-6bf373273ce6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.350523] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9de7c0fc-8df2-4f45-a099-36b76d569736 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.362663] env[61852]: INFO nova.compute.manager [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] instance snapshotting [ 865.368349] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c12b8fb-1149-4449-b1bb-6e8b054e2e8f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.379640] env[61852]: DEBUG nova.compute.manager [req-1369cc11-4fe6-4926-92e6-2a00627a77e4 req-8fddc128-21a8-4898-8f11-8239be00aec7 service nova] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Detach interface failed, port_id=9a926e1c-a6f1-408c-84f3-dfb08cb0464c, reason: Instance 8897a654-6805-45b0-b12b-16f7981d33ad could not be found. {{(pid=61852) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 865.380902] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.906s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.396398] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.428s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.396670] env[61852]: DEBUG nova.objects.instance [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lazy-loading 'resources' on Instance uuid 988c0a5c-b84d-44cf-9068-defd7132b0c9 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 865.398891] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3b8e4b1-e0b3-486e-a8c2-47f3bc2ba7e2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.418698] env[61852]: INFO nova.scheduler.client.report [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Deleted allocations for instance b0f8f7dd-e559-43be-b541-c3da48a07d68 [ 865.493048] env[61852]: DEBUG nova.compute.manager [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Start spawning the instance on the 
hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 865.515192] env[61852]: DEBUG nova.virt.hardware [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 865.515423] env[61852]: DEBUG nova.virt.hardware [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 865.515587] env[61852]: DEBUG nova.virt.hardware [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 865.515772] env[61852]: DEBUG nova.virt.hardware [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 865.515922] env[61852]: DEBUG nova.virt.hardware [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 865.516267] env[61852]: DEBUG nova.virt.hardware [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 865.516508] env[61852]: DEBUG nova.virt.hardware [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 865.516679] env[61852]: DEBUG nova.virt.hardware [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
865.516855] env[61852]: DEBUG nova.virt.hardware [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 865.517102] env[61852]: DEBUG nova.virt.hardware [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 865.517219] env[61852]: DEBUG nova.virt.hardware [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 865.518364] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eac146d-02ae-4b8e-b085-fe0cc95708b6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.527129] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1f5cfbc-aad2-4d92-a65e-21af4a30a237 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.621814] env[61852]: INFO nova.compute.manager [-] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Took 1.27 seconds to deallocate network for instance. [ 865.774928] env[61852]: DEBUG oslo_concurrency.lockutils [None req-044da8e1-74f8-4904-94bd-58f32a2169e3 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Lock "23ff3009-7b13-4d5e-93ed-ca1c3e9127bb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 40.100s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.853776] env[61852]: DEBUG nova.network.neutron [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Successfully updated port: 36d2d052-6d80-4ad5-bb1b-2c54679bd05b {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 865.910854] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Creating Snapshot of the VM instance {{(pid=61852) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 865.910854] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1b8c290d-d616-4a4d-a334-62097283a062 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.918183] env[61852]: DEBUG oslo_vmware.api [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 865.918183] env[61852]: value = "task-1293033" [ 865.918183] env[61852]: _type = "Task" [ 865.918183]
env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.932219] env[61852]: DEBUG oslo_vmware.api [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293033, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.932797] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a07a01ff-6288-4660-ac27-f28a333883d1 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "b0f8f7dd-e559-43be-b541-c3da48a07d68" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 18.025s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.975522] env[61852]: INFO nova.compute.manager [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Rescuing [ 865.975794] env[61852]: DEBUG oslo_concurrency.lockutils [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquiring lock "refresh_cache-23ff3009-7b13-4d5e-93ed-ca1c3e9127bb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.975955] env[61852]: DEBUG oslo_concurrency.lockutils [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquired lock "refresh_cache-23ff3009-7b13-4d5e-93ed-ca1c3e9127bb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.976138] env[61852]: DEBUG nova.network.neutron [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 866.122846] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c103ae-095f-4434-9107-4533ff00d712 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.130263] env[61852]: DEBUG oslo_concurrency.lockutils [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.131337] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ce8bc9-12fb-4226-8ee3-b001e141fc86 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.161531] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3f5db3e-6b3f-4eff-aa1b-f28251b9364b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.168645]
env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c74552-8242-4e2b-b414-2e1b4bd3d384 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.182313] env[61852]: DEBUG nova.compute.provider_tree [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 866.356795] env[61852]: DEBUG oslo_concurrency.lockutils [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "refresh_cache-4b85f2d7-d99a-4332-a78c-3f2a50c7cb92" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.356960] env[61852]: DEBUG oslo_concurrency.lockutils [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired lock "refresh_cache-4b85f2d7-d99a-4332-a78c-3f2a50c7cb92" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.357130] env[61852]: DEBUG nova.network.neutron [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 866.427785] env[61852]: DEBUG oslo_vmware.api [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293033, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.665394] env[61852]: DEBUG nova.network.neutron [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Updating instance_info_cache with network_info: [{"id": "9444dd57-04ba-4f44-8080-68ec800cc9b6", "address": "fa:16:3e:95:4d:d6", "network": {"id": "f986fa1f-0449-45a5-86ee-66a7fe44ea49", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-901829409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8dff8d945da948a89ee0fb2e2ddd0f9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9444dd57-04", "ovs_interfaceid": "9444dd57-04ba-4f44-8080-68ec800cc9b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.685613] env[61852]: DEBUG nova.scheduler.client.report [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 866.877104] env[61852]: DEBUG nova.compute.manager [req-c52b35f3-8fa9-4512-96e6-363ae4252c04 req-29489fcf-5dd5-46d9-acc2-09dea4fabcd8 service nova] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Received event network-vif-plugged-36d2d052-6d80-4ad5-bb1b-2c54679bd05b {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 866.877357] env[61852]: DEBUG oslo_concurrency.lockutils [req-c52b35f3-8fa9-4512-96e6-363ae4252c04 req-29489fcf-5dd5-46d9-acc2-09dea4fabcd8 service nova] Acquiring lock "4b85f2d7-d99a-4332-a78c-3f2a50c7cb92-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.877647] env[61852]: DEBUG oslo_concurrency.lockutils [req-c52b35f3-8fa9-4512-96e6-363ae4252c04 req-29489fcf-5dd5-46d9-acc2-09dea4fabcd8 service nova] Lock "4b85f2d7-d99a-4332-a78c-3f2a50c7cb92-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [
866.877902] env[61852]: DEBUG oslo_concurrency.lockutils [req-c52b35f3-8fa9-4512-96e6-363ae4252c04 req-29489fcf-5dd5-46d9-acc2-09dea4fabcd8 service nova] Lock "4b85f2d7-d99a-4332-a78c-3f2a50c7cb92-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.878484] env[61852]: DEBUG nova.compute.manager [req-c52b35f3-8fa9-4512-96e6-363ae4252c04 req-29489fcf-5dd5-46d9-acc2-09dea4fabcd8 service nova] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] No waiting events found dispatching network-vif-plugged-36d2d052-6d80-4ad5-bb1b-2c54679bd05b {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 866.878754] env[61852]: WARNING nova.compute.manager [req-c52b35f3-8fa9-4512-96e6-363ae4252c04 req-29489fcf-5dd5-46d9-acc2-09dea4fabcd8 service nova] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Received unexpected event network-vif-plugged-36d2d052-6d80-4ad5-bb1b-2c54679bd05b for instance with vm_state building and task_state spawning. [ 866.878967] env[61852]: DEBUG nova.compute.manager [req-c52b35f3-8fa9-4512-96e6-363ae4252c04 req-29489fcf-5dd5-46d9-acc2-09dea4fabcd8 service nova] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Received event network-changed-36d2d052-6d80-4ad5-bb1b-2c54679bd05b {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 866.879230] env[61852]: DEBUG nova.compute.manager [req-c52b35f3-8fa9-4512-96e6-363ae4252c04 req-29489fcf-5dd5-46d9-acc2-09dea4fabcd8 service nova] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Refreshing instance network info cache due to event network-changed-36d2d052-6d80-4ad5-bb1b-2c54679bd05b. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 866.879488] env[61852]: DEBUG oslo_concurrency.lockutils [req-c52b35f3-8fa9-4512-96e6-363ae4252c04 req-29489fcf-5dd5-46d9-acc2-09dea4fabcd8 service nova] Acquiring lock "refresh_cache-4b85f2d7-d99a-4332-a78c-3f2a50c7cb92" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.892749] env[61852]: DEBUG nova.network.neutron [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 866.931964] env[61852]: DEBUG oslo_vmware.api [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293033, 'name': CreateSnapshot_Task} progress is 100%.
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.033167] env[61852]: DEBUG nova.network.neutron [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Updating instance_info_cache with network_info: [{"id": "36d2d052-6d80-4ad5-bb1b-2c54679bd05b", "address": "fa:16:3e:4c:62:3b", "network": {"id": "37c975fc-d484-4e07-82b4-dc10db3dab61", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2132613748-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14a017ea2b084ae0ad2994dda7809c7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36d2d052-6d", "ovs_interfaceid": "36d2d052-6d80-4ad5-bb1b-2c54679bd05b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.168680] env[61852]: DEBUG oslo_concurrency.lockutils [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Releasing lock "refresh_cache-23ff3009-7b13-4d5e-93ed-ca1c3e9127bb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.192088] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.796s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.194287] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.168s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.194636] env[61852]: DEBUG nova.objects.instance [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lazy-loading 'resources' on Instance uuid f48b40ab-23f2-4071-8168-e7e2411ad64d {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 867.210355] env[61852]: INFO nova.scheduler.client.report [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Deleted allocations for instance 988c0a5c-b84d-44cf-9068-defd7132b0c9 [ 867.428945] 
env[61852]: DEBUG oslo_vmware.api [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293033, 'name': CreateSnapshot_Task, 'duration_secs': 1.063669} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.429249] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Created Snapshot of the VM instance {{(pid=61852) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 867.429976] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87967c20-860a-4ff0-a494-ba94207e81cd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.535887] env[61852]: DEBUG oslo_concurrency.lockutils [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Releasing lock "refresh_cache-4b85f2d7-d99a-4332-a78c-3f2a50c7cb92" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.536226] env[61852]: DEBUG nova.compute.manager [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Instance network_info: |[{"id": "36d2d052-6d80-4ad5-bb1b-2c54679bd05b", "address": "fa:16:3e:4c:62:3b", "network": {"id": "37c975fc-d484-4e07-82b4-dc10db3dab61", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2132613748-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14a017ea2b084ae0ad2994dda7809c7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36d2d052-6d", "ovs_interfaceid": "36d2d052-6d80-4ad5-bb1b-2c54679bd05b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 867.536569] env[61852]: DEBUG oslo_concurrency.lockutils [req-c52b35f3-8fa9-4512-96e6-363ae4252c04 req-29489fcf-5dd5-46d9-acc2-09dea4fabcd8 service nova] Acquired lock "refresh_cache-4b85f2d7-d99a-4332-a78c-3f2a50c7cb92" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.536758] env[61852]: DEBUG nova.network.neutron [req-c52b35f3-8fa9-4512-96e6-363ae4252c04 req-29489fcf-5dd5-46d9-acc2-09dea4fabcd8 service nova] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Refreshing network info cache for port 36d2d052-6d80-4ad5-bb1b-2c54679bd05b {{(pid=61852) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 867.537977] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4c:62:3b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51ae336c-12cf-406a-b1ca-54e9ce553b3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '36d2d052-6d80-4ad5-bb1b-2c54679bd05b', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 867.545920] env[61852]: DEBUG oslo.service.loopingcall [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 867.546281] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 867.546511] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2b6bcc0e-c06e-440e-93ba-3b080444f8c2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.566176] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 867.566176] env[61852]: value = "task-1293034" [ 867.566176] env[61852]: _type = "Task" [ 867.566176] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.573807] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293034, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.697855] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 867.698139] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-745bbc12-c02e-40ee-82f5-bc86aaee0eb4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.707302] env[61852]: DEBUG oslo_vmware.api [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 867.707302] env[61852]: value = "task-1293035" [ 867.707302] env[61852]: _type = "Task" [ 867.707302] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.715445] env[61852]: DEBUG oslo_vmware.api [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293035, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.721279] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9ec4f7b5-2b8d-47ac-a79c-857b0158a6d9 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lock "988c0a5c-b84d-44cf-9068-defd7132b0c9" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 18.866s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.919603] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21c3f112-95a0-4409-a8dc-bcb175fefa2a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.927102] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cbcec3c-ec40-47e5-be7b-d3eb706e3124 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.965113] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Creating linked-clone VM from snapshot {{(pid=61852) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 867.965869] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f9b7c9a1-46ba-448c-875a-ab66d62c53a8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.969370] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f48e1c-5cc2-4023-8103-29b84f757b2f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.977752] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94e76124-cfbe-45fa-aaa9-4400285fc8fa {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.982202] env[61852]: DEBUG oslo_vmware.api [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 867.982202] env[61852]: value = "task-1293036" [ 867.982202] env[61852]: _type = "Task" [ 867.982202] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.995113] env[61852]: DEBUG nova.compute.provider_tree [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 868.000260] env[61852]: DEBUG oslo_vmware.api [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293036, 'name': CloneVM_Task} progress is 10%.
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.075850] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293034, 'name': CreateVM_Task} progress is 99%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.217388] env[61852]: DEBUG oslo_vmware.api [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293035, 'name': PowerOffVM_Task, 'duration_secs': 0.198299} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.217623] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 868.218419] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77c547ae-a6d4-4f8a-afed-58c2630b248e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.239081] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e70c820-7db1-487b-a460-26df9b47cc0d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.256570] env[61852]: DEBUG nova.network.neutron [req-c52b35f3-8fa9-4512-96e6-363ae4252c04 req-29489fcf-5dd5-46d9-acc2-09dea4fabcd8 service nova] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Updated VIF entry in instance network info cache for port 36d2d052-6d80-4ad5-bb1b-2c54679bd05b. 
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 868.256931] env[61852]: DEBUG nova.network.neutron [req-c52b35f3-8fa9-4512-96e6-363ae4252c04 req-29489fcf-5dd5-46d9-acc2-09dea4fabcd8 service nova] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Updating instance_info_cache with network_info: [{"id": "36d2d052-6d80-4ad5-bb1b-2c54679bd05b", "address": "fa:16:3e:4c:62:3b", "network": {"id": "37c975fc-d484-4e07-82b4-dc10db3dab61", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2132613748-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14a017ea2b084ae0ad2994dda7809c7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36d2d052-6d", "ovs_interfaceid": "36d2d052-6d80-4ad5-bb1b-2c54679bd05b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.266419] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 868.266993] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8fc76bc1-4809-4f84-8350-3ca70e079678 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.274913] env[61852]: DEBUG oslo_vmware.api [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 868.274913] env[61852]: value = "task-1293037" [ 868.274913] env[61852]: _type = "Task" [ 868.274913] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.284266] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] VM already powered off {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 868.284479] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 868.284723] env[61852]: DEBUG oslo_concurrency.lockutils [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.284878] env[61852]: DEBUG oslo_concurrency.lockutils [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.285104] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 868.285352] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a255f1d9-8b7a-4ac2-8ca4-bae6bda30e0f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.293093] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 868.293295] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 868.293991] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09aafc26-9ad6-4fb9-8e76-d90143c7d27a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.298907] env[61852]: DEBUG oslo_vmware.api [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 868.298907] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52008f7e-200c-262d-7e91-bfec4c0a745c" [ 868.298907] env[61852]: _type = "Task" [ 868.298907] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.310338] env[61852]: DEBUG oslo_vmware.api [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52008f7e-200c-262d-7e91-bfec4c0a745c, 'name': SearchDatastore_Task, 'duration_secs': 0.008778} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.311082] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdef1b77-1888-46a2-a826-35fd921e4e01 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.316128] env[61852]: DEBUG oslo_vmware.api [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 868.316128] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5248fdb2-0583-1243-9076-f3d747400eb2" [ 868.316128] env[61852]: _type = "Task" [ 868.316128] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.323612] env[61852]: DEBUG oslo_vmware.api [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5248fdb2-0583-1243-9076-f3d747400eb2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.491331] env[61852]: DEBUG oslo_vmware.api [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293036, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.497398] env[61852]: DEBUG nova.scheduler.client.report [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 868.577218] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293034, 'name': CreateVM_Task, 'duration_secs': 0.820016} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.577365] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 868.578177] env[61852]: DEBUG oslo_concurrency.lockutils [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.578351] env[61852]: DEBUG oslo_concurrency.lockutils [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.578688] env[61852]: DEBUG oslo_concurrency.lockutils [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 868.578950] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d9e6ab1-b41f-4a91-b4cb-74c91d9f25b3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.584263] env[61852]: DEBUG oslo_vmware.api [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 868.584263] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5293acfe-18aa-0b67-58dd-5cf5288ebef1" [ 868.584263] env[61852]: _type = "Task" [ 868.584263] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.592483] env[61852]: DEBUG oslo_vmware.api [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5293acfe-18aa-0b67-58dd-5cf5288ebef1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.760224] env[61852]: DEBUG oslo_concurrency.lockutils [req-c52b35f3-8fa9-4512-96e6-363ae4252c04 req-29489fcf-5dd5-46d9-acc2-09dea4fabcd8 service nova] Releasing lock "refresh_cache-4b85f2d7-d99a-4332-a78c-3f2a50c7cb92" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.826397] env[61852]: DEBUG oslo_vmware.api [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5248fdb2-0583-1243-9076-f3d747400eb2, 'name': SearchDatastore_Task, 'duration_secs': 0.007947} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.826670] env[61852]: DEBUG oslo_concurrency.lockutils [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.826947] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb/90fd8f39-16b3-43e0-a682-0ec131005e31-rescue.vmdk. {{(pid=61852) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 868.827226] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4dd4f5f2-6024-434d-8f88-4aa21ea9881e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.833307] env[61852]: DEBUG oslo_vmware.api [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 868.833307] env[61852]: value = "task-1293038" [ 868.833307] env[61852]: _type = "Task" [ 868.833307] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.840674] env[61852]: DEBUG oslo_vmware.api [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293038, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.994378] env[61852]: DEBUG oslo_vmware.api [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293036, 'name': CloneVM_Task} progress is 95%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.004557] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.810s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.007034] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.828s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.008576] env[61852]: INFO nova.compute.claims [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 869.029302] env[61852]: INFO nova.scheduler.client.report [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Deleted allocations for instance f48b40ab-23f2-4071-8168-e7e2411ad64d [ 869.096930] env[61852]: DEBUG oslo_vmware.api [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5293acfe-18aa-0b67-58dd-5cf5288ebef1, 'name': SearchDatastore_Task, 'duration_secs': 0.008919} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.097216] env[61852]: DEBUG oslo_concurrency.lockutils [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.097353] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 869.097624] env[61852]: DEBUG oslo_concurrency.lockutils [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.097748] env[61852]: DEBUG oslo_concurrency.lockutils [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.097924] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 869.098536] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a088789a-60e9-4a24-8f10-4371c7361043 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.109776] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 869.109974] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 869.110977] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f792ecf1-fbf2-4534-883d-6b9e77f39c4c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.117961] env[61852]: DEBUG oslo_vmware.api [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 869.117961] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d20f07-8e01-1187-3b3f-9a4a50898d3c" [ 869.117961] env[61852]: _type = "Task" [ 869.117961] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.127797] env[61852]: DEBUG oslo_vmware.api [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d20f07-8e01-1187-3b3f-9a4a50898d3c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.344236] env[61852]: DEBUG oslo_vmware.api [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293038, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.506492} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.346966] env[61852]: INFO nova.virt.vmwareapi.ds_util [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb/90fd8f39-16b3-43e0-a682-0ec131005e31-rescue.vmdk. 
[ 869.346966] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-009855ee-6f40-4f4d-ae7a-c1f2cd694354 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.370823] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb/90fd8f39-16b3-43e0-a682-0ec131005e31-rescue.vmdk or device None with type thin {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 869.371127] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-25afd39c-8134-459a-95a4-9adee1c23c79 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.389054] env[61852]: DEBUG oslo_vmware.api [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 869.389054] env[61852]: value = "task-1293039" [ 869.389054] env[61852]: _type = "Task" [ 869.389054] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.397335] env[61852]: DEBUG oslo_vmware.api [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293039, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.495731] env[61852]: DEBUG oslo_vmware.api [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293036, 'name': CloneVM_Task} progress is 100%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.541581] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e7df8da0-d075-4b18-862d-dec765f2b922 tempest-MultipleCreateTestJSON-312067153 tempest-MultipleCreateTestJSON-312067153-project-member] Lock "f48b40ab-23f2-4071-8168-e7e2411ad64d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 20.565s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.629275] env[61852]: DEBUG oslo_vmware.api [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d20f07-8e01-1187-3b3f-9a4a50898d3c, 'name': SearchDatastore_Task, 'duration_secs': 0.013939} completed successfully.
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.630136] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1f8f330-a48c-48e4-9c0d-a05c8992870e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.635983] env[61852]: DEBUG oslo_vmware.api [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 869.635983] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52320c01-f48c-1d2f-9d33-0d4512e79990" [ 869.635983] env[61852]: _type = "Task" [ 869.635983] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.645406] env[61852]: DEBUG oslo_vmware.api [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52320c01-f48c-1d2f-9d33-0d4512e79990, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.903199] env[61852]: DEBUG oslo_vmware.api [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293039, 'name': ReconfigVM_Task, 'duration_secs': 0.280734} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.903527] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb/90fd8f39-16b3-43e0-a682-0ec131005e31-rescue.vmdk or device None with type thin {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 869.904553] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31a852e8-dd45-4ea8-92fe-17da20b4125a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.929551] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a6f2128c-628d-4e9d-a518-80910d1060ce {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.948021] env[61852]: DEBUG oslo_vmware.api [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 869.948021] env[61852]: value = "task-1293040" [ 869.948021] env[61852]: _type = "Task" [ 869.948021] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.953250] env[61852]: DEBUG oslo_vmware.api [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293040, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.992419] env[61852]: DEBUG oslo_vmware.api [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293036, 'name': CloneVM_Task, 'duration_secs': 1.7943} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.992706] env[61852]: INFO nova.virt.vmwareapi.vmops [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Created linked-clone VM from snapshot [ 869.993505] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c6d0339-3b2e-4836-9efb-4513316e5f74 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.000790] env[61852]: DEBUG nova.virt.vmwareapi.images [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Uploading image ca674796-50b0-4a64-90f2-d0e6a238a167 {{(pid=61852) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 870.027151] env[61852]: DEBUG oslo_vmware.rw_handles [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 870.027151] env[61852]: value = "vm-277374" [ 870.027151] env[61852]: _type = "VirtualMachine" [ 870.027151] env[61852]: }. {{(pid=61852) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 870.027445] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-c9fd604c-fbe5-41c8-a5e4-cc343e3624d2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.035419] env[61852]: DEBUG oslo_vmware.rw_handles [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lease: (returnval){ [ 870.035419] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]520b3ce0-e930-4eb2-34a6-8d8495a4ea55" [ 870.035419] env[61852]: _type = "HttpNfcLease" [ 870.035419] env[61852]: } obtained for exporting VM: (result){ [ 870.035419] env[61852]: value = "vm-277374" [ 870.035419] env[61852]: _type = "VirtualMachine" [ 870.035419] env[61852]: }. {{(pid=61852) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 870.035419] env[61852]: DEBUG oslo_vmware.api [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the lease: (returnval){ [ 870.035419] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]520b3ce0-e930-4eb2-34a6-8d8495a4ea55" [ 870.035419] env[61852]: _type = "HttpNfcLease" [ 870.035419] env[61852]: } to be ready. 
{{(pid=61852) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 870.043786] env[61852]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 870.043786] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]520b3ce0-e930-4eb2-34a6-8d8495a4ea55" [ 870.043786] env[61852]: _type = "HttpNfcLease" [ 870.043786] env[61852]: } is initializing. {{(pid=61852) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 870.149272] env[61852]: DEBUG oslo_vmware.api [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52320c01-f48c-1d2f-9d33-0d4512e79990, 'name': SearchDatastore_Task, 'duration_secs': 0.010827} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.149564] env[61852]: DEBUG oslo_concurrency.lockutils [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.149821] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92/4b85f2d7-d99a-4332-a78c-3f2a50c7cb92.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 870.150101] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-82cfc496-7380-41e0-ba5d-2bcd901fcfdb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.159281] env[61852]: DEBUG oslo_vmware.api [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 870.159281] env[61852]: value = "task-1293042" [ 870.159281] env[61852]: _type = "Task" [ 870.159281] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.169219] env[61852]: DEBUG oslo_vmware.api [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293042, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.229449] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5708b50-e496-4977-93c0-edbaa82f7389 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.237253] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca04e5df-cfa4-4363-baad-3d53b9146c21 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.267678] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef8509a4-86dc-4129-bd22-adb938098ad1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.275539] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6db205fe-06e9-45b8-b98d-635dbed0c331 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.288806] env[61852]: DEBUG nova.compute.provider_tree [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 870.456951] env[61852]: DEBUG oslo_vmware.api [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293040, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.547145] env[61852]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 870.547145] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]520b3ce0-e930-4eb2-34a6-8d8495a4ea55" [ 870.547145] env[61852]: _type = "HttpNfcLease" [ 870.547145] env[61852]: } is ready. {{(pid=61852) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 870.547567] env[61852]: DEBUG oslo_vmware.rw_handles [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 870.547567] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]520b3ce0-e930-4eb2-34a6-8d8495a4ea55" [ 870.547567] env[61852]: _type = "HttpNfcLease" [ 870.547567] env[61852]: }. {{(pid=61852) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 870.548995] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70f3ff8d-e853-4b1d-95a0-1eb8fc3cf9e6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.560457] env[61852]: DEBUG oslo_vmware.rw_handles [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520553ff-d1a8-2a66-f89a-e3be909575bb/disk-0.vmdk from lease info. 
{{(pid=61852) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 870.560771] env[61852]: DEBUG oslo_vmware.rw_handles [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520553ff-d1a8-2a66-f89a-e3be909575bb/disk-0.vmdk for reading. {{(pid=61852) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 870.671466] env[61852]: DEBUG oslo_vmware.api [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293042, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.730192] env[61852]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-82b3a69d-04a6-41e3-90a0-b73f1e3195ed {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.794159] env[61852]: DEBUG nova.scheduler.client.report [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 870.960432] env[61852]: DEBUG oslo_vmware.api [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293040, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.170765] env[61852]: DEBUG oslo_vmware.api [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293042, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.297739] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.290s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.298730] env[61852]: DEBUG nova.compute.manager [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 871.302862] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.169s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.303699] env[61852]: DEBUG nova.objects.instance [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Lazy-loading 'resources' on Instance uuid aeaa2828-6d83-4b26-bd1c-5f654c70713f {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 871.461359] env[61852]: DEBUG oslo_vmware.api [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293040, 'name': ReconfigVM_Task, 'duration_secs': 1.159541} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.462085] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 871.462431] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b7b024d6-ff1b-4b94-9b25-c44e24832888 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.472586] env[61852]: DEBUG oslo_vmware.api [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 871.472586] env[61852]: value = "task-1293043" [ 871.472586] env[61852]: _type = "Task" [ 871.472586] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.483617] env[61852]: DEBUG oslo_vmware.api [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293043, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.671425] env[61852]: DEBUG oslo_vmware.api [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293042, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.362313} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.672211] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92/4b85f2d7-d99a-4332-a78c-3f2a50c7cb92.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 871.672211] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 871.673149] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c3364809-c2fc-467f-a1bd-a7a885f62845 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.680126] env[61852]: DEBUG oslo_vmware.api [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 871.680126] env[61852]: value = "task-1293044" [ 871.680126] env[61852]: _type = "Task" [ 871.680126] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.688684] env[61852]: DEBUG oslo_vmware.api [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293044, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.807546] env[61852]: DEBUG nova.compute.utils [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 871.812423] env[61852]: DEBUG nova.compute.manager [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 871.812880] env[61852]: DEBUG nova.network.neutron [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 871.881384] env[61852]: DEBUG nova.policy [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '81f93d952cce4f6a8cd87f87696786aa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0c7f48c684044564b9081d6bc04c7e29', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 871.985661] env[61852]: DEBUG oslo_vmware.api [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293043, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.040625] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b1b207b-3cdf-44e6-984d-2f6a60206bd4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.048473] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa5660cb-56e8-49e2-9503-85abdff89c80 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.081656] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5033fb79-3c57-418b-96ad-0c25fb57e1f3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.091177] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba273c0b-574f-4636-a2bc-d251d5e89d5f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.102917] env[61852]: DEBUG nova.compute.provider_tree [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 872.165399] env[61852]: DEBUG nova.network.neutron [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Successfully created port: 1fce3501-a013-4bf6-a413-f63b810e42d6 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 872.191881] env[61852]: DEBUG oslo_vmware.api [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 
tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293044, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064172} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.192289] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 872.193092] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ea630f-03bd-43dd-b281-dab87f0c3c85 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.216410] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Reconfiguring VM instance instance-0000004c to attach disk [datastore2] 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92/4b85f2d7-d99a-4332-a78c-3f2a50c7cb92.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 872.216806] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd600010-3890-4426-ab3f-b8dd12cb15f3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.236293] env[61852]: DEBUG oslo_vmware.api [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 872.236293] env[61852]: value = "task-1293045" [ 872.236293] env[61852]: _type = "Task" [ 872.236293] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.244630] env[61852]: DEBUG oslo_vmware.api [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293045, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.312930] env[61852]: DEBUG nova.compute.manager [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 872.415647] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 872.415907] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 872.485144] env[61852]: DEBUG oslo_vmware.api [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293043, 'name': PowerOnVM_Task, 'duration_secs': 0.587961} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.485451] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 872.489031] env[61852]: DEBUG nova.compute.manager [None req-23626484-d91b-4c98-84f2-331d617a9130 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 872.489151] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37eade3e-5028-4cf3-9f14-fb82c839579b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.567551] env[61852]: DEBUG nova.network.neutron [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Successfully created port: c1ef5d89-7e6e-47bc-b2f8-b1d96731a3fa {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 872.606586] env[61852]: DEBUG nova.scheduler.client.report [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 872.749718] env[61852]: DEBUG oslo_vmware.api [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293045, 'name': ReconfigVM_Task, 'duration_secs': 0.398836} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.750167] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Reconfigured VM instance instance-0000004c to attach disk [datastore2] 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92/4b85f2d7-d99a-4332-a78c-3f2a50c7cb92.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 872.750837] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f77bcc9c-b0ec-4a6f-82ba-e35e49929e1f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.758651] env[61852]: DEBUG oslo_vmware.api [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 872.758651] env[61852]: value = "task-1293046" [ 872.758651] env[61852]: _type = "Task" [ 872.758651] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.771281] env[61852]: DEBUG oslo_vmware.api [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293046, 'name': Rename_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.923732] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 872.923732] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Starting heal instance info cache {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 872.923732] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Rebuilding the list of instances to heal {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 873.114198] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.811s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.116966] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.633s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.116966] env[61852]: DEBUG nova.objects.instance [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 
tempest-DeleteServersTestJSON-947876575-project-member] Lazy-loading 'resources' on Instance uuid 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 873.138172] env[61852]: INFO nova.scheduler.client.report [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Deleted allocations for instance aeaa2828-6d83-4b26-bd1c-5f654c70713f [ 873.268717] env[61852]: DEBUG oslo_vmware.api [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293046, 'name': Rename_Task, 'duration_secs': 0.187688} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.269144] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 873.269264] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-63024b4d-d804-468b-83ef-1983e7299f8f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.275574] env[61852]: DEBUG oslo_vmware.api [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 873.275574] env[61852]: value = "task-1293047" [ 873.275574] env[61852]: _type = "Task" [ 873.275574] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.283120] env[61852]: DEBUG oslo_vmware.api [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293047, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.323583] env[61852]: DEBUG nova.compute.manager [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 873.350160] env[61852]: DEBUG nova.virt.hardware [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 873.350419] env[61852]: DEBUG nova.virt.hardware [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 873.350579] env[61852]: DEBUG nova.virt.hardware [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 873.350761] env[61852]: DEBUG nova.virt.hardware [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 873.350908] env[61852]: DEBUG nova.virt.hardware [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 873.351067] env[61852]: DEBUG nova.virt.hardware [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 873.351303] env[61852]: DEBUG nova.virt.hardware [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 873.351478] env[61852]: DEBUG nova.virt.hardware [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 873.351647] env[61852]: DEBUG nova.virt.hardware [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 
tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 873.351812] env[61852]: DEBUG nova.virt.hardware [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 873.351985] env[61852]: DEBUG nova.virt.hardware [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 873.352879] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d217de5-0692-4a74-b91b-3e6234b546aa {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.362130] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfdbdb36-e1a8-4a87-a7ab-c5386c14de1f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.427932] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Skipping network cache update for instance because it is being deleted. {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 873.428198] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Skipping network cache update for instance because it is Building. {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 873.428298] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Skipping network cache update for instance because it is Building. 
{{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 873.445136] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "refresh_cache-d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 873.445287] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquired lock "refresh_cache-d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.445452] env[61852]: DEBUG nova.network.neutron [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Forcefully refreshing network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 873.445606] env[61852]: DEBUG nova.objects.instance [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lazy-loading 'info_cache' on Instance uuid d3922357-383f-4f7e-9c76-4eb688a092b9 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 873.619628] env[61852]: DEBUG nova.objects.instance [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lazy-loading 'numa_topology' on Instance uuid 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 873.645405] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea9e88f7-2ae4-427b-aaf9-a391925a970f tempest-ServersNegativeTestMultiTenantJSON-180882239 tempest-ServersNegativeTestMultiTenantJSON-180882239-project-member] Lock "aeaa2828-6d83-4b26-bd1c-5f654c70713f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.803s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.788455] env[61852]: DEBUG oslo_vmware.api [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293047, 'name': PowerOnVM_Task} progress is 90%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.010293] env[61852]: DEBUG nova.compute.manager [req-e349d16e-50bf-4f89-939c-b82453981218 req-ff6944b0-c9a2-4d39-9a14-949326e92bc9 service nova] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Received event network-vif-plugged-1fce3501-a013-4bf6-a413-f63b810e42d6 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 874.010476] env[61852]: DEBUG oslo_concurrency.lockutils [req-e349d16e-50bf-4f89-939c-b82453981218 req-ff6944b0-c9a2-4d39-9a14-949326e92bc9 service nova] Acquiring lock "eae1ad1f-f213-4227-93aa-b0ccf660e638-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 874.010632] env[61852]: DEBUG oslo_concurrency.lockutils [req-e349d16e-50bf-4f89-939c-b82453981218 req-ff6944b0-c9a2-4d39-9a14-949326e92bc9 service nova] Lock "eae1ad1f-f213-4227-93aa-b0ccf660e638-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.010822] env[61852]: DEBUG oslo_concurrency.lockutils [req-e349d16e-50bf-4f89-939c-b82453981218 req-ff6944b0-c9a2-4d39-9a14-949326e92bc9 service nova] Lock "eae1ad1f-f213-4227-93aa-b0ccf660e638-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.010903] env[61852]: DEBUG nova.compute.manager [req-e349d16e-50bf-4f89-939c-b82453981218 req-ff6944b0-c9a2-4d39-9a14-949326e92bc9 service nova] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] No waiting events found dispatching network-vif-plugged-1fce3501-a013-4bf6-a413-f63b810e42d6 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 874.011364] env[61852]: WARNING nova.compute.manager [req-e349d16e-50bf-4f89-939c-b82453981218 req-ff6944b0-c9a2-4d39-9a14-949326e92bc9 service nova] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Received unexpected event network-vif-plugged-1fce3501-a013-4bf6-a413-f63b810e42d6 for instance with vm_state building and task_state spawning. [ 874.109324] env[61852]: DEBUG nova.network.neutron [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Successfully updated port: 1fce3501-a013-4bf6-a413-f63b810e42d6 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 874.122223] env[61852]: DEBUG nova.objects.base [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Object Instance<00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9> lazy-loaded attributes: resources,numa_topology {{(pid=61852) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 874.294916] env[61852]: DEBUG oslo_vmware.api [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293047, 'name': PowerOnVM_Task, 'duration_secs': 0.544889} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.294916] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 874.295244] env[61852]: INFO nova.compute.manager [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Took 8.80 seconds to spawn the instance on the hypervisor. [ 874.295293] env[61852]: DEBUG nova.compute.manager [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 874.296120] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13213bb1-d623-49bf-856a-9a4fdddf0bbd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.338026] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01a47ce5-7a65-4377-8acd-f64e54bf2150 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.346529] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b125bb-7249-4b51-b6f5-512ef1d8f5da {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.377760] env[61852]: INFO nova.compute.manager [None req-8864ec48-f8a1-4438-8a12-a5bb03224efa tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Unrescuing [ 874.378107] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8864ec48-f8a1-4438-8a12-a5bb03224efa tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquiring lock "refresh_cache-23ff3009-7b13-4d5e-93ed-ca1c3e9127bb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 874.378235] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8864ec48-f8a1-4438-8a12-a5bb03224efa tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquired lock "refresh_cache-23ff3009-7b13-4d5e-93ed-ca1c3e9127bb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.378406] env[61852]: DEBUG nova.network.neutron [None req-8864ec48-f8a1-4438-8a12-a5bb03224efa tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 874.381861] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08910d3e-1505-48f5-a1b0-bf91d7d3f92f {{(pid=61852) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.390656] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d30ed110-c86c-40ec-9726-189f5ecfe47d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.406049] env[61852]: DEBUG nova.compute.provider_tree [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 874.468152] env[61852]: DEBUG nova.network.neutron [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 874.821393] env[61852]: INFO nova.compute.manager [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Took 25.24 seconds to build instance. [ 874.908851] env[61852]: DEBUG nova.scheduler.client.report [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 875.061129] env[61852]: DEBUG nova.network.neutron [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.266756] env[61852]: DEBUG nova.network.neutron [None req-8864ec48-f8a1-4438-8a12-a5bb03224efa tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Updating instance_info_cache with network_info: [{"id": "9444dd57-04ba-4f44-8080-68ec800cc9b6", "address": "fa:16:3e:95:4d:d6", "network": {"id": "f986fa1f-0449-45a5-86ee-66a7fe44ea49", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-901829409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8dff8d945da948a89ee0fb2e2ddd0f9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tap9444dd57-04", "ovs_interfaceid": "9444dd57-04ba-4f44-8080-68ec800cc9b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.325524] env[61852]: DEBUG oslo_concurrency.lockutils [None req-bc167caa-72af-4cb8-8228-0e141723f3e3 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "4b85f2d7-d99a-4332-a78c-3f2a50c7cb92" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.750s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.414441] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.298s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.421254] env[61852]: DEBUG oslo_concurrency.lockutils [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.469s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.422866] env[61852]: INFO nova.compute.claims [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 875.564815] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Releasing lock "refresh_cache-d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 875.565072] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Updated the network info_cache for instance {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 875.565337] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 875.565605] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 875.565914] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 875.566223] env[61852]: DEBUG oslo_service.periodic_task [None 
req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 875.566342] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 875.566502] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 875.566652] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61852) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 875.566913] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 875.769843] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8864ec48-f8a1-4438-8a12-a5bb03224efa tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Releasing lock "refresh_cache-23ff3009-7b13-4d5e-93ed-ca1c3e9127bb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 875.772672] env[61852]: DEBUG nova.objects.instance [None req-8864ec48-f8a1-4438-8a12-a5bb03224efa tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Lazy-loading 'flavor' on Instance uuid 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 875.939164] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6af23757-7da0-4d51-97c6-0fd7f29117bc tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 36.574s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.940154] env[61852]: DEBUG oslo_concurrency.lockutils [None req-052a34bc-65a6-418a-bb1e-4f8ada420b51 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 15.943s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.940425] env[61852]: DEBUG oslo_concurrency.lockutils [None req-052a34bc-65a6-418a-bb1e-4f8ada420b51 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 875.940697] env[61852]: DEBUG oslo_concurrency.lockutils [None 
req-052a34bc-65a6-418a-bb1e-4f8ada420b51 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.940936] env[61852]: DEBUG oslo_concurrency.lockutils [None req-052a34bc-65a6-418a-bb1e-4f8ada420b51 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.943067] env[61852]: INFO nova.compute.manager [None req-052a34bc-65a6-418a-bb1e-4f8ada420b51 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Terminating instance [ 875.944945] env[61852]: DEBUG nova.compute.manager [None req-052a34bc-65a6-418a-bb1e-4f8ada420b51 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 875.945174] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-052a34bc-65a6-418a-bb1e-4f8ada420b51 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 875.945473] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3dc97ef9-8efa-4343-b8ac-4f4f52ec2cee {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.955994] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff3a9419-5d5c-4de5-8b2c-b894c65bbcbf {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.982977] env[61852]: WARNING nova.virt.vmwareapi.vmops [None req-052a34bc-65a6-418a-bb1e-4f8ada420b51 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9 could not be found. [ 875.983210] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-052a34bc-65a6-418a-bb1e-4f8ada420b51 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 875.983399] env[61852]: INFO nova.compute.manager [None req-052a34bc-65a6-418a-bb1e-4f8ada420b51 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Took 0.04 seconds to destroy the instance on the hypervisor. 
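
The terminate sequence above completes even though the shelved instance no longer exists in vCenter: the vmwareapi layer logs the InstanceNotFound as a WARNING, reports the instance destroyed in 0.04 seconds, and lets network deallocation proceed. A minimal sketch of that tolerant-destroy pattern follows; the helper names (find_vm_moref, destroy_backend_vm) are hypothetical stand-ins for illustration, not Nova's real code.

    # Sketch only: illustrates the tolerant-destroy pattern seen in the log
    # above, not Nova's actual implementation.
    class InstanceNotFound(Exception):
        """Raised when the backend VM cannot be located (hypothetical)."""

    def destroy(instance_uuid, find_vm_moref, destroy_backend_vm, log):
        try:
            vm_ref = find_vm_moref(instance_uuid)   # may raise InstanceNotFound
            destroy_backend_vm(vm_ref)              # power off, unregister, delete files
        except InstanceNotFound:
            # Mirrors the WARNING above: "Instance does not exist on backend"
            log.warning("Instance does not exist on backend: %s", instance_uuid)
        # Either way, control returns to the compute manager, which still
        # deallocates networking for the instance.
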
[ 875.983651] env[61852]: DEBUG oslo.service.loopingcall [None req-052a34bc-65a6-418a-bb1e-4f8ada420b51 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 875.983892] env[61852]: DEBUG nova.compute.manager [-] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 875.983980] env[61852]: DEBUG nova.network.neutron [-] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 876.070314] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 876.184597] env[61852]: DEBUG nova.compute.manager [req-4115d4b6-75a9-4c7b-a903-9b923a1f25f4 req-26762ce3-cf81-48b8-9a59-843042d55be6 service nova] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Received event network-changed-1fce3501-a013-4bf6-a413-f63b810e42d6 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 876.185153] env[61852]: DEBUG nova.compute.manager [req-4115d4b6-75a9-4c7b-a903-9b923a1f25f4 req-26762ce3-cf81-48b8-9a59-843042d55be6 service nova] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Refreshing instance network info cache due to event network-changed-1fce3501-a013-4bf6-a413-f63b810e42d6. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
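
The "Waiting for function ..._deallocate_network_with_retries to return" record above comes from oslo.service's looping-call machinery, which here appears to wrap Neutron deallocation in a retry so a transient failure does not abort the delete. A simplified stand-in for that retry wrapper, where the attempt count and back-off values are illustrative assumptions rather than Nova's configuration:

    import time

    def deallocate_network_with_retries(deallocate, attempts=3, backoff_s=2.0):
        # Simplified retry loop; the real wrapper lives in
        # oslo_service.loopingcall, as the log's file path shows.
        for attempt in range(1, attempts + 1):
            try:
                return deallocate()  # e.g. calls deallocate_for_instance()
            except Exception:
                if attempt == attempts:
                    raise  # out of retries: surface the failure
                time.sleep(backoff_s * attempt)  # back off before retrying
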
[ 876.185504] env[61852]: DEBUG oslo_concurrency.lockutils [req-4115d4b6-75a9-4c7b-a903-9b923a1f25f4 req-26762ce3-cf81-48b8-9a59-843042d55be6 service nova] Acquiring lock "refresh_cache-eae1ad1f-f213-4227-93aa-b0ccf660e638" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 876.186009] env[61852]: DEBUG oslo_concurrency.lockutils [req-4115d4b6-75a9-4c7b-a903-9b923a1f25f4 req-26762ce3-cf81-48b8-9a59-843042d55be6 service nova] Acquired lock "refresh_cache-eae1ad1f-f213-4227-93aa-b0ccf660e638" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.186009] env[61852]: DEBUG nova.network.neutron [req-4115d4b6-75a9-4c7b-a903-9b923a1f25f4 req-26762ce3-cf81-48b8-9a59-843042d55be6 service nova] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Refreshing network info cache for port 1fce3501-a013-4bf6-a413-f63b810e42d6 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 876.279585] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ea5408-4983-42f2-98c6-03586761dd6b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.313737] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-8864ec48-f8a1-4438-8a12-a5bb03224efa tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 876.314464] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9225e45c-ee45-4018-8c7a-6e044ace89d0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.321157] env[61852]: DEBUG oslo_vmware.api [None req-8864ec48-f8a1-4438-8a12-a5bb03224efa tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 876.321157] env[61852]: value = "task-1293048" [ 876.321157] env[61852]: _type = "Task" [ 876.321157] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.332349] env[61852]: DEBUG oslo_vmware.api [None req-8864ec48-f8a1-4438-8a12-a5bb03224efa tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293048, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
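
Every VirtualMachine.*_Task invocation in this section follows the same cycle just shown for task-1293048: the SOAP call returns a task moref, wait_for_task blocks on it, _poll_task logs the progress percentage, and completion is reported with a duration. A schematic version of that polling loop; the interval and the get_task_info accessor are assumptions for illustration, while the real code is oslo_vmware.api's wait_for_task/_poll_task shown in the log paths:

    import time

    def wait_for_task(get_task_info, task_ref, interval_s=0.5, log=None):
        # Schematic poll loop in the spirit of oslo_vmware.api.wait_for_task.
        # get_task_info(task_ref) is a hypothetical accessor returning an
        # object with .state ('running'|'success'|'error') and .progress.
        start = time.monotonic()
        while True:
            info = get_task_info(task_ref)
            if info.state == "success":
                if log:
                    log.debug("Task %s completed in %.3fs",
                              task_ref, time.monotonic() - start)
                return info
            if info.state == "error":
                raise RuntimeError(f"Task {task_ref} failed")
            if log:
                log.debug("Task %s progress is %s%%", task_ref, info.progress)
            time.sleep(interval_s)
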
[ 876.357974] env[61852]: DEBUG nova.network.neutron [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Successfully updated port: c1ef5d89-7e6e-47bc-b2f8-b1d96731a3fa {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 876.669651] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1748b9ad-0690-4838-b7be-d0cdeb4f1d0d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.678546] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62c46f8e-ee52-413d-9a7c-d5aed47a4f5c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.716107] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cca6e66b-75d8-4f1b-86c4-51fee734f632 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.727161] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed87d290-9496-4543-85ff-f622a12c9365 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.741133] env[61852]: DEBUG nova.compute.provider_tree [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 876.756909] env[61852]: DEBUG nova.network.neutron [req-4115d4b6-75a9-4c7b-a903-9b923a1f25f4 req-26762ce3-cf81-48b8-9a59-843042d55be6 service nova] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 876.769235] env[61852]: DEBUG nova.network.neutron [-] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.830925] env[61852]: DEBUG oslo_vmware.api [None req-8864ec48-f8a1-4438-8a12-a5bb03224efa tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293048, 'name': PowerOffVM_Task, 'duration_secs': 0.216027} completed successfully.
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.831333] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-8864ec48-f8a1-4438-8a12-a5bb03224efa tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 876.837264] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-8864ec48-f8a1-4438-8a12-a5bb03224efa tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Reconfiguring VM instance instance-0000004b to detach disk 2001 {{(pid=61852) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 876.837625] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aafa1a88-3e3c-4d64-bac6-77768ef944bb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.853930] env[61852]: DEBUG nova.network.neutron [req-4115d4b6-75a9-4c7b-a903-9b923a1f25f4 req-26762ce3-cf81-48b8-9a59-843042d55be6 service nova] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.861056] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Acquiring lock "refresh_cache-eae1ad1f-f213-4227-93aa-b0ccf660e638" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 876.861408] env[61852]: DEBUG oslo_vmware.api [None req-8864ec48-f8a1-4438-8a12-a5bb03224efa tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 876.861408] env[61852]: value = "task-1293049" [ 876.861408] env[61852]: _type = "Task" [ 876.861408] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.871206] env[61852]: DEBUG oslo_vmware.api [None req-8864ec48-f8a1-4438-8a12-a5bb03224efa tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293049, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.894941] env[61852]: INFO nova.compute.manager [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Rebuilding instance [ 876.961944] env[61852]: DEBUG nova.compute.manager [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 876.963026] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b57ff30-2361-4269-a28c-fa0c284d2b88 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.244970] env[61852]: DEBUG nova.scheduler.client.report [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 877.272860] env[61852]: INFO nova.compute.manager [-] [instance: 00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9] Took 1.29 seconds to deallocate network for instance. [ 877.357373] env[61852]: DEBUG oslo_concurrency.lockutils [req-4115d4b6-75a9-4c7b-a903-9b923a1f25f4 req-26762ce3-cf81-48b8-9a59-843042d55be6 service nova] Releasing lock "refresh_cache-eae1ad1f-f213-4227-93aa-b0ccf660e638" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 877.357780] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Acquired lock "refresh_cache-eae1ad1f-f213-4227-93aa-b0ccf660e638" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.357899] env[61852]: DEBUG nova.network.neutron [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 877.372347] env[61852]: DEBUG oslo_vmware.api [None req-8864ec48-f8a1-4438-8a12-a5bb03224efa tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293049, 'name': ReconfigVM_Task, 'duration_secs': 0.238077} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.372679] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-8864ec48-f8a1-4438-8a12-a5bb03224efa tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Reconfigured VM instance instance-0000004b to detach disk 2001 {{(pid=61852) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 877.372876] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-8864ec48-f8a1-4438-8a12-a5bb03224efa tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 877.373148] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1ff7e9ef-4962-49b8-bbf9-ba1739b82401 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.380020] env[61852]: DEBUG oslo_vmware.api [None req-8864ec48-f8a1-4438-8a12-a5bb03224efa tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 877.380020] env[61852]: value = "task-1293050" [ 877.380020] env[61852]: _type = "Task" [ 877.380020] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.388028] env[61852]: DEBUG oslo_vmware.api [None req-8864ec48-f8a1-4438-8a12-a5bb03224efa tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293050, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.479770] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 877.479770] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-925abe17-320c-434d-9dd4-7b7291f71e22 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.486057] env[61852]: DEBUG oslo_vmware.api [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 877.486057] env[61852]: value = "task-1293051" [ 877.486057] env[61852]: _type = "Task" [ 877.486057] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.494145] env[61852]: DEBUG oslo_vmware.api [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293051, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.752286] env[61852]: DEBUG oslo_concurrency.lockutils [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.334s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.753114] env[61852]: DEBUG nova.compute.manager [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 877.760234] env[61852]: DEBUG oslo_concurrency.lockutils [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.706s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.760622] env[61852]: DEBUG nova.objects.instance [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Lazy-loading 'resources' on Instance uuid 21d74604-6a64-44ee-a012-ebff7166853e {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 877.891472] env[61852]: DEBUG oslo_vmware.api [None req-8864ec48-f8a1-4438-8a12-a5bb03224efa tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293050, 'name': PowerOnVM_Task, 'duration_secs': 0.395002} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.891838] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-8864ec48-f8a1-4438-8a12-a5bb03224efa tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 877.892110] env[61852]: DEBUG nova.compute.manager [None req-8864ec48-f8a1-4438-8a12-a5bb03224efa tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 877.892916] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4e59a17-a178-4fe3-8325-dc1ffd196315 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.896173] env[61852]: DEBUG nova.network.neutron [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 877.997459] env[61852]: DEBUG oslo_vmware.api [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293051, 'name': PowerOffVM_Task, 'duration_secs': 0.278094} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.997914] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 877.998159] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 877.999098] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee6a92f-9927-4037-b58a-da472f6a3eb9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.007129] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 878.007382] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f84612c3-7cf5-4ab3-8bba-073a00fa19c7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.071641] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 878.071878] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Deleting contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 878.072142] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Deleting the datastore file [datastore2] 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 878.072461] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7340fc6a-421f-4f85-8262-ab32e72d6519 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.079510] env[61852]: DEBUG oslo_vmware.api [None 
[ 878.090220] env[61852]: DEBUG oslo_vmware.api [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293053, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 878.186183] env[61852]: DEBUG nova.network.neutron [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Updating instance_info_cache with network_info: [{"id": "1fce3501-a013-4bf6-a413-f63b810e42d6", "address": "fa:16:3e:46:bd:7b", "network": {"id": "323f546d-d6b6-4e75-98e4-f0b79b6a6884", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1156411187", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.206", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0c7f48c684044564b9081d6bc04c7e29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fce3501-a0", "ovs_interfaceid": "1fce3501-a013-4bf6-a413-f63b810e42d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c1ef5d89-7e6e-47bc-b2f8-b1d96731a3fa", "address": "fa:16:3e:df:ec:a4", "network": {"id": "8b4ef00f-12ce-405e-9c14-6f46a2a379c0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1197741628", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.96", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "0c7f48c684044564b9081d6bc04c7e29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1ef5d89-7e", "ovs_interfaceid": "c1ef5d89-7e6e-47bc-b2f8-b1d96731a3fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 878.212133] env[61852]: DEBUG nova.compute.manager [req-1a71779d-8109-48c1-90e7-dc5964270efc req-ea1016a6-2419-4af3-8623-44f9b0218176 service nova] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Received event network-vif-plugged-c1ef5d89-7e6e-47bc-b2f8-b1d96731a3fa {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
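The `Updating instance_info_cache with network_info` record above serializes Neutron's per-VIF network model: a list of VIF dicts, each with a nested network, subnets, and fixed IPs. A small sketch that walks a structure of that shape once it has been parsed into Python lists and dicts (field names are taken from the record above; nothing else is assumed):

def summarize_vifs(network_info: list[dict]) -> list[str]:
    # One line per VIF: port id, MAC, fixed IPs, and MTU, mirroring the
    # fields visible in the cached blob above.
    lines = []
    for vif in network_info:
        net = vif['network']
        ips = [ip['address']
               for subnet in net['subnets']
               for ip in subnet['ips']]
        lines.append(f"port {vif['id']} mac {vif['address']} "
                     f"type {vif['type']} devname {vif['devname']} "
                     f"ips {','.join(ips)} mtu {net['meta']['mtu']}")
    return lines

For the two-VIF blob above this yields one summary line per port (192.168.128.206 on tap1fce3501-a0 and 192.168.129.96 on tapc1ef5d89-7e), which helps when eyeballing multi-NIC instances in logs like this one.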
[ 878.212444] env[61852]: DEBUG oslo_concurrency.lockutils [req-1a71779d-8109-48c1-90e7-dc5964270efc req-ea1016a6-2419-4af3-8623-44f9b0218176 service nova] Acquiring lock "eae1ad1f-f213-4227-93aa-b0ccf660e638-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 878.212661] env[61852]: DEBUG oslo_concurrency.lockutils [req-1a71779d-8109-48c1-90e7-dc5964270efc req-ea1016a6-2419-4af3-8623-44f9b0218176 service nova] Lock "eae1ad1f-f213-4227-93aa-b0ccf660e638-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 878.213060] env[61852]: DEBUG oslo_concurrency.lockutils [req-1a71779d-8109-48c1-90e7-dc5964270efc req-ea1016a6-2419-4af3-8623-44f9b0218176 service nova] Lock "eae1ad1f-f213-4227-93aa-b0ccf660e638-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 878.213247] env[61852]: DEBUG nova.compute.manager [req-1a71779d-8109-48c1-90e7-dc5964270efc req-ea1016a6-2419-4af3-8623-44f9b0218176 service nova] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] No waiting events found dispatching network-vif-plugged-c1ef5d89-7e6e-47bc-b2f8-b1d96731a3fa {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 878.213450] env[61852]: WARNING nova.compute.manager [req-1a71779d-8109-48c1-90e7-dc5964270efc req-ea1016a6-2419-4af3-8623-44f9b0218176 service nova] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Received unexpected event network-vif-plugged-c1ef5d89-7e6e-47bc-b2f8-b1d96731a3fa for instance with vm_state building and task_state spawning.
[ 878.213582] env[61852]: DEBUG nova.compute.manager [req-1a71779d-8109-48c1-90e7-dc5964270efc req-ea1016a6-2419-4af3-8623-44f9b0218176 service nova] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Received event network-changed-c1ef5d89-7e6e-47bc-b2f8-b1d96731a3fa {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 878.213742] env[61852]: DEBUG nova.compute.manager [req-1a71779d-8109-48c1-90e7-dc5964270efc req-ea1016a6-2419-4af3-8623-44f9b0218176 service nova] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Refreshing instance network info cache due to event network-changed-c1ef5d89-7e6e-47bc-b2f8-b1d96731a3fa. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
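The burst of `-events` lock records above is the compute manager checking whether any spawner is waiting on `network-vif-plugged` for this instance; nothing is, so it logs the WARNING and moves on. A simplified, hypothetical registry showing that pattern (an illustration only, not Nova's actual implementation):

import threading

class InstanceEvents:
    # Spawning threads register an expected event; the external-event
    # handler pops and signals it, or reports it as unexpected.
    def __init__(self):
        self._lock = threading.Lock()          # the "-events" lock above
        self._waiters: dict[tuple[str, str], threading.Event] = {}

    def prepare_for_event(self, instance_uuid: str, name: str) -> threading.Event:
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, name)] = ev
        return ev                              # caller blocks on ev.wait()

    def pop_instance_event(self, instance_uuid: str, name: str) -> bool:
        with self._lock:
            ev = self._waiters.pop((instance_uuid, name), None)
        if ev is None:
            return False   # -> "No waiting events found" + WARNING above
        ev.set()           # wake the thread blocked on ev.wait()
        return True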
[ 878.213910] env[61852]: DEBUG oslo_concurrency.lockutils [req-1a71779d-8109-48c1-90e7-dc5964270efc req-ea1016a6-2419-4af3-8623-44f9b0218176 service nova] Acquiring lock "refresh_cache-eae1ad1f-f213-4227-93aa-b0ccf660e638" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 878.263892] env[61852]: DEBUG nova.compute.utils [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}}
[ 878.269043] env[61852]: DEBUG nova.compute.manager [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 878.269155] env[61852]: DEBUG nova.network.neutron [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 878.300493] env[61852]: DEBUG oslo_concurrency.lockutils [None req-052a34bc-65a6-418a-bb1e-4f8ada420b51 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "00ac3612-8a92-4af6-af1c-cfcbeb4d6cc9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.360s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 878.313706] env[61852]: DEBUG nova.policy [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7422e3984ccc486dbfc98aa24a9295da', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1fdd2d4aeb954b6fae049090b32f657b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}}
[ 878.482675] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ececcb13-c297-4d14-a628-201e03c2c931 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 878.492163] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ffca337-821c-42d1-80fe-6ba9c5812f84 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 878.523017] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c4f441-9aba-4611-915a-13a1ddee5f3d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 878.530877] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aab0cbb9-c338-4bd5-84a5-24dac3df426e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
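The `nova.policy` record above shows an oslo.policy check for `network:attach_external_network` failing against a member-role credential set. A self-contained sketch of the same kind of check; the registered default used here is an assumption for illustration, not Nova's actual rule:

from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
# Assumed default for illustration: only admins may attach external networks.
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

creds = {'roles': ['reader', 'member'], 'is_admin': False,
         'project_id': '1fdd2d4aeb954b6fae049090b32f657b'}
target = {'project_id': creds['project_id']}

# With do_raise=False this returns False, matching the "failed" record above.
print(enforcer.enforce('network:attach_external_network', target, creds,
                       do_raise=False))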
[ 878.546228] env[61852]: DEBUG nova.compute.provider_tree [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 878.590799] env[61852]: DEBUG oslo_vmware.api [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293053, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.392433} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 878.591094] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 878.591308] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Deleted contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 878.591494] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 878.597408] env[61852]: DEBUG nova.network.neutron [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Successfully created port: 3377bf3e-e158-4caa-a81f-8ff46b934338 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 878.690223] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Releasing lock "refresh_cache-eae1ad1f-f213-4227-93aa-b0ccf660e638" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 878.690223] env[61852]: DEBUG nova.compute.manager [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Instance network_info: |[{"id": "1fce3501-a013-4bf6-a413-f63b810e42d6", "address": "fa:16:3e:46:bd:7b", "network": {"id": "323f546d-d6b6-4e75-98e4-f0b79b6a6884", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1156411187", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.206", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0c7f48c684044564b9081d6bc04c7e29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fce3501-a0", "ovs_interfaceid": "1fce3501-a013-4bf6-a413-f63b810e42d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c1ef5d89-7e6e-47bc-b2f8-b1d96731a3fa", "address": "fa:16:3e:df:ec:a4", "network": {"id": "8b4ef00f-12ce-405e-9c14-6f46a2a379c0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1197741628", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.96", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "0c7f48c684044564b9081d6bc04c7e29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1ef5d89-7e", "ovs_interfaceid": "c1ef5d89-7e6e-47bc-b2f8-b1d96731a3fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}}
[ 878.690461] env[61852]: DEBUG oslo_concurrency.lockutils [req-1a71779d-8109-48c1-90e7-dc5964270efc req-ea1016a6-2419-4af3-8623-44f9b0218176 service nova] Acquired lock "refresh_cache-eae1ad1f-f213-4227-93aa-b0ccf660e638" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 878.690737] env[61852]: DEBUG nova.network.neutron [req-1a71779d-8109-48c1-90e7-dc5964270efc req-ea1016a6-2419-4af3-8623-44f9b0218176 service nova] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Refreshing network info cache for port c1ef5d89-7e6e-47bc-b2f8-b1d96731a3fa {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 878.694413] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:46:bd:7b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7a44713-0af1-486e-bc0d-00e03a769fa4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1fce3501-a013-4bf6-a413-f63b810e42d6', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:ec:a4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5fb99c57-eaa0-447b-bb33-baced85d9c00', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c1ef5d89-7e6e-47bc-b2f8-b1d96731a3fa', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 878.709899] env[61852]: DEBUG oslo.service.loopingcall [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 878.710740] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 878.712309] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dbb8317f-7dfb-449d-8815-45e8c3524535 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 878.748170] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 878.748170] env[61852]: value = "task-1293054"
[ 878.748170] env[61852]: _type = "Task"
[ 878.748170] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 878.759786] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293054, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 878.767042] env[61852]: DEBUG nova.compute.manager [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 879.049250] env[61852]: DEBUG nova.scheduler.client.report [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 879.151900] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "4fb68588-21a8-4004-9bbc-aa1655624bcb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 879.152155] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "4fb68588-21a8-4004-9bbc-aa1655624bcb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 879.189167] env[61852]: DEBUG oslo_concurrency.lockutils [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquiring lock "23ff3009-7b13-4d5e-93ed-ca1c3e9127bb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
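The `set_inventory_for_provider` record a few entries up carries the compute node's full placement inventory. Placement derives usable capacity per resource class as (total - reserved) * allocation_ratio, with max_unit capping any single allocation. Checking that arithmetic against the numbers in the record:

# Capacity math for the inventory record above.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'max_unit': 16, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'max_unit': 138, 'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity={capacity:.0f} (single allocation <= {inv['max_unit']})")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400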
[ 879.189411] env[61852]: DEBUG oslo_concurrency.lockutils [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Lock "23ff3009-7b13-4d5e-93ed-ca1c3e9127bb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 879.189552] env[61852]: DEBUG oslo_concurrency.lockutils [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquiring lock "23ff3009-7b13-4d5e-93ed-ca1c3e9127bb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 879.189739] env[61852]: DEBUG oslo_concurrency.lockutils [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Lock "23ff3009-7b13-4d5e-93ed-ca1c3e9127bb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 879.189918] env[61852]: DEBUG oslo_concurrency.lockutils [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Lock "23ff3009-7b13-4d5e-93ed-ca1c3e9127bb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 879.192400] env[61852]: INFO nova.compute.manager [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Terminating instance
[ 879.194510] env[61852]: DEBUG nova.compute.manager [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
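The Acquiring / acquired ... waited / "released" ... held triplets above are emitted by oslo.concurrency's lockutils wrapper, which times how long a caller queued for a named lock and how long it held it. A minimal sketch of both spellings of that API, with lock names mirroring the ones in the log:

from oslo_concurrency import lockutils

# Decorator form: every call serializes on the named lock, producing the
# waited/held records seen above.
@lockutils.synchronized('compute_resources')
def instance_claim():
    ...  # resource-tracker critical section

# Context-manager form, e.g. the per-instance "-events" lock.
def clear_events(instance_uuid: str):
    with lockutils.lock(f'{instance_uuid}-events'):
        ...  # mutate the event registry while holding the lock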
[ 879.194752] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 879.195712] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-313b0b7c-7452-4a78-be71-11950dfd17a3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 879.206617] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 879.206947] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-92928e34-aea9-4a52-a413-270d31bd0426 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 879.213975] env[61852]: DEBUG oslo_vmware.api [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){
[ 879.213975] env[61852]: value = "task-1293055"
[ 879.213975] env[61852]: _type = "Task"
[ 879.213975] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 879.222819] env[61852]: DEBUG oslo_vmware.api [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293055, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 879.259066] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293054, 'name': CreateVM_Task, 'duration_secs': 0.416839} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
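Every `Waiting for the task: (returnval){ ... }` block, `progress is N%` poll, and `completed successfully` line in this log comes from the same oslo.vmware helper: invoke a *_Task method, then poll the returned task object. A minimal sketch of that call pattern; the connection values are placeholders, not taken from this log:

from oslo_vmware import api


def power_off(session: api.VMwareAPISession, vm_ref) -> None:
    # invoke_api() returns a task moref; wait_for_task() polls it (the
    # "progress is N%" records above) and raises if the task errors out.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)


# Placeholder host and credentials; task_poll_interval drives the cadence
# of the progress polls seen in the records above.
# session = api.VMwareAPISession('vcenter.example.test', 'user', 'secret',
#                                api_retry_count=10, task_poll_interval=0.5)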
[ 879.259298] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 879.260116] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 879.260298] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 879.260641] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 879.260905] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91248c9a-9400-4c3e-8d86-d765c474b448 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 879.265978] env[61852]: DEBUG oslo_vmware.api [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Waiting for the task: (returnval){
[ 879.265978] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]526b3420-a825-8e82-c9c5-72833f7fcde0"
[ 879.265978] env[61852]: _type = "Task"
[ 879.265978] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 879.277194] env[61852]: DEBUG oslo_vmware.api [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]526b3420-a825-8e82-c9c5-72833f7fcde0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 879.512079] env[61852]: DEBUG nova.network.neutron [req-1a71779d-8109-48c1-90e7-dc5964270efc req-ea1016a6-2419-4af3-8623-44f9b0218176 service nova] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Updated VIF entry in instance network info cache for port c1ef5d89-7e6e-47bc-b2f8-b1d96731a3fa. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 879.512538] env[61852]: DEBUG nova.network.neutron [req-1a71779d-8109-48c1-90e7-dc5964270efc req-ea1016a6-2419-4af3-8623-44f9b0218176 service nova] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Updating instance_info_cache with network_info: [{"id": "1fce3501-a013-4bf6-a413-f63b810e42d6", "address": "fa:16:3e:46:bd:7b", "network": {"id": "323f546d-d6b6-4e75-98e4-f0b79b6a6884", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1156411187", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.206", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0c7f48c684044564b9081d6bc04c7e29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fce3501-a0", "ovs_interfaceid": "1fce3501-a013-4bf6-a413-f63b810e42d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c1ef5d89-7e6e-47bc-b2f8-b1d96731a3fa", "address": "fa:16:3e:df:ec:a4", "network": {"id": "8b4ef00f-12ce-405e-9c14-6f46a2a379c0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1197741628", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.96", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "0c7f48c684044564b9081d6bc04c7e29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1ef5d89-7e", "ovs_interfaceid": "c1ef5d89-7e6e-47bc-b2f8-b1d96731a3fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 879.523498] env[61852]: DEBUG oslo_vmware.rw_handles [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520553ff-d1a8-2a66-f89a-e3be909575bb/disk-0.vmdk. {{(pid=61852) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}}
[ 879.524490] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4bab298-c07a-4333-a754-4e0258c209b0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 879.533698] env[61852]: DEBUG oslo_vmware.rw_handles [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520553ff-d1a8-2a66-f89a-e3be909575bb/disk-0.vmdk is in state: ready. {{(pid=61852) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}}
[ 879.533698] env[61852]: ERROR oslo_vmware.rw_handles [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520553ff-d1a8-2a66-f89a-e3be909575bb/disk-0.vmdk due to incomplete transfer.
[ 879.533698] env[61852]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-4cddc0a9-fdc6-477a-839f-02dd9e81c9fe {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 879.539621] env[61852]: DEBUG oslo_vmware.rw_handles [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520553ff-d1a8-2a66-f89a-e3be909575bb/disk-0.vmdk. {{(pid=61852) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}}
[ 879.539850] env[61852]: DEBUG nova.virt.vmwareapi.images [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Uploaded image ca674796-50b0-4a64-90f2-d0e6a238a167 to the Glance image server {{(pid=61852) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}}
[ 879.542224] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Destroying the VM {{(pid=61852) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}}
[ 879.542510] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0188af0d-eaaa-4ba3-8e21-dd318ad38fed {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 879.548521] env[61852]: DEBUG oslo_vmware.api [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){
[ 879.548521] env[61852]: value = "task-1293056"
[ 879.548521] env[61852]: _type = "Task"
[ 879.548521] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 879.556148] env[61852]: DEBUG oslo_vmware.api [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293056, 'name': Destroy_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
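The rw_handles records above close out a stream-optimized VMDK read: the HTTP NFC lease is still `ready`, but the handle was closed before the server saw a complete transfer, so the lease is aborted rather than completed (the ERROR line above). A rough sketch of that decision; HttpNfcLeaseComplete and HttpNfcLeaseAbort are the vSphere calls named in the log, while reading the lease `state` property this way is an assumption for illustration:

from oslo_vmware import vim_util


def release_lease(session, lease, transfer_complete: bool) -> None:
    # Read the HttpNfcLease 'state' property ("ready" in the record above).
    state = session.invoke_api(vim_util, 'get_object_property',
                               session.vim, lease, 'state')
    if state == 'ready':
        if transfer_complete:
            session.invoke_api(session.vim, 'HttpNfcLeaseComplete', lease)
        else:
            # -> "Aborting lease ... due to incomplete transfer."
            session.invoke_api(session.vim, 'HttpNfcLeaseAbort', lease)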
[ 879.556917] env[61852]: DEBUG oslo_concurrency.lockutils [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.797s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 879.559012] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.163s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 879.560602] env[61852]: INFO nova.compute.claims [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 879.576767] env[61852]: INFO nova.scheduler.client.report [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Deleted allocations for instance 21d74604-6a64-44ee-a012-ebff7166853e
[ 879.624717] env[61852]: DEBUG nova.virt.hardware [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 879.624972] env[61852]: DEBUG nova.virt.hardware [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 879.625183] env[61852]: DEBUG nova.virt.hardware [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 879.625485] env[61852]: DEBUG nova.virt.hardware [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 879.625654] env[61852]: DEBUG nova.virt.hardware [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 879.625807] env[61852]: DEBUG nova.virt.hardware [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 879.626038] env[61852]: DEBUG nova.virt.hardware [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 879.626219] env[61852]: DEBUG nova.virt.hardware [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 879.626397] env[61852]: DEBUG nova.virt.hardware [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 879.626589] env[61852]: DEBUG nova.virt.hardware [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 879.626787] env[61852]: DEBUG nova.virt.hardware [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 879.627880] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cb72315-1d87-45a4-bc95-ef1ca33257c8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 879.636685] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab70bdf9-24da-41cd-9792-a79deb83a9ac {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 879.650570] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4c:62:3b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51ae336c-12cf-406a-b1ca-54e9ce553b3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '36d2d052-6d80-4ad5-bb1b-2c54679bd05b', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
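The topology records above walk the m1.nano flavor (1 vCPU, no flavor or image topology constraints) through nova.virt.hardware: with preferences of 0 and limits of 65536 in every dimension, the only factorization of one vCPU is 1 socket x 1 core x 1 thread, hence "Got 1 possible topologies". A simplified, hypothetical version of that enumeration (not Nova's exact algorithm):

import itertools

def possible_topologies(vcpus: int, max_sockets: int = 65536,
                        max_cores: int = 65536, max_threads: int = 65536):
    # Yield every (sockets, cores, threads) triple whose product is exactly
    # the vCPU count and which respects the per-dimension maxima.
    dims = (min(vcpus, max_sockets), min(vcpus, max_cores),
            min(vcpus, max_threads))
    for s, c, t in itertools.product(*(range(1, d + 1) for d in dims)):
        if s * c * t == vcpus:
            yield (s, c, t)

print(list(possible_topologies(1)))  # [(1, 1, 1)], matching the records above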
[ 879.658115] env[61852]: DEBUG oslo.service.loopingcall [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 879.658464] env[61852]: DEBUG nova.compute.manager [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 879.660868] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 879.661277] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4bcd42db-bdb2-48c6-8236-c86ca0bb0988 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 879.680911] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 879.680911] env[61852]: value = "task-1293057"
[ 879.680911] env[61852]: _type = "Task"
[ 879.680911] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 879.688622] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293057, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 879.723974] env[61852]: DEBUG oslo_vmware.api [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293055, 'name': PowerOffVM_Task, 'duration_secs': 0.186344} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 879.724341] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 879.724530] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 879.724828] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ba905fcb-b64b-469f-aef3-de70e76faeef {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 879.778829] env[61852]: DEBUG nova.compute.manager [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 879.781211] env[61852]: DEBUG oslo_vmware.api [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]526b3420-a825-8e82-c9c5-72833f7fcde0, 'name': SearchDatastore_Task, 'duration_secs': 0.01212} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 879.781788] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 879.782070] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 879.782401] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 879.782562] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 879.782802] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 879.783152] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a446103-8334-468a-813a-8bc3743ebd17 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 879.786896] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 879.786896] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
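`Processing image 90fd8f39...` and the surrounding datastore-browser traffic implement the driver's copy-on-first-use image cache: take a lock named after the cached VMDK, search devstack-image-cache_base for it, and fetch only when missing. A hypothetical sketch of that shape; the two callables stand in for the SearchDatastore_Task probe and the actual download, and are invented names, not the driver's real API:

from typing import Callable

from oslo_concurrency import lockutils


def ensure_cached_image(image_id: str, ds_name: str,
                        exists: Callable[[str], bool],
                        fetch: Callable[[str], None]) -> str:
    path = (f'[{ds_name}] devstack-image-cache_base/'
            f'{image_id}/{image_id}.vmdk')
    # Serialize concurrent spawns of the same image, as the image-cache
    # lock records above do.
    with lockutils.lock(path):
        if not exists(path):   # would wrap HostDatastoreBrowser.SearchDatastore_Task
            fetch(path)        # download from Glance, then copy into the cache
    return path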
[ 879.787070] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Deleting the datastore file [datastore1] 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 879.787603] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-15e51b04-30bc-40ab-a5c8-74f58b465351 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 879.792953] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 879.793169] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 879.795038] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ad4a2dc-ff37-4a29-8c4a-e75f68d72a29 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 879.797749] env[61852]: DEBUG oslo_vmware.api [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){
[ 879.797749] env[61852]: value = "task-1293059"
[ 879.797749] env[61852]: _type = "Task"
[ 879.797749] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 879.806823] env[61852]: DEBUG oslo_vmware.api [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Waiting for the task: (returnval){
[ 879.806823] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52527bc5-a2ea-1243-9d27-db28a9945b93"
[ 879.806823] env[61852]: _type = "Task"
[ 879.806823] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 879.809011] env[61852]: DEBUG nova.virt.hardware [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 879.809268] env[61852]: DEBUG nova.virt.hardware [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 879.809451] env[61852]: DEBUG nova.virt.hardware [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 879.809664] env[61852]: DEBUG nova.virt.hardware [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 879.809843] env[61852]: DEBUG nova.virt.hardware [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 879.810032] env[61852]: DEBUG nova.virt.hardware [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 879.811595] env[61852]: DEBUG nova.virt.hardware [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 879.811595] env[61852]: DEBUG nova.virt.hardware [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 879.811595] env[61852]: DEBUG nova.virt.hardware [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 879.811595] env[61852]: DEBUG nova.virt.hardware [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 879.811595] env[61852]: DEBUG nova.virt.hardware [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 879.812225] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a589755-8b85-4d11-ae5c-86fbd4f77b68 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 879.820412] env[61852]: DEBUG oslo_vmware.api [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293059, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 879.828279] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-626d78a6-2076-4657-9e38-a860a066be7c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 879.831872] env[61852]: DEBUG oslo_vmware.api [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52527bc5-a2ea-1243-9d27-db28a9945b93, 'name': SearchDatastore_Task, 'duration_secs': 0.016303} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 879.832861] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0896671-c52b-4008-b1b1-cf1b29a40b74 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 879.844909] env[61852]: DEBUG oslo_vmware.api [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Waiting for the task: (returnval){
[ 879.844909] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]526d8520-17a8-017c-c05b-2511cd0633b6"
[ 879.844909] env[61852]: _type = "Task"
[ 879.844909] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 879.851949] env[61852]: DEBUG oslo_vmware.api [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]526d8520-17a8-017c-c05b-2511cd0633b6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.016482] env[61852]: DEBUG oslo_concurrency.lockutils [req-1a71779d-8109-48c1-90e7-dc5964270efc req-ea1016a6-2419-4af3-8623-44f9b0218176 service nova] Releasing lock "refresh_cache-eae1ad1f-f213-4227-93aa-b0ccf660e638" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.060229] env[61852]: DEBUG oslo_vmware.api [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293056, 'name': Destroy_Task, 'duration_secs': 0.337962} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.060229] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Destroyed the VM [ 880.060229] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Deleting Snapshot of the VM instance {{(pid=61852) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 880.060229] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-2e2fc4c1-8574-4bef-afda-dc38295895a7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.070518] env[61852]: DEBUG oslo_vmware.api [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 880.070518] env[61852]: value = "task-1293060" [ 880.070518] env[61852]: _type = "Task" [ 880.070518] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.079242] env[61852]: DEBUG oslo_vmware.api [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293060, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.088578] env[61852]: DEBUG oslo_concurrency.lockutils [None req-92fbf3d6-f527-4a26-9c68-0b90041f87bf tempest-VolumesAdminNegativeTest-571670340 tempest-VolumesAdminNegativeTest-571670340-project-member] Lock "21d74604-6a64-44ee-a012-ebff7166853e" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 21.217s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.149926] env[61852]: DEBUG nova.network.neutron [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Successfully updated port: 3377bf3e-e158-4caa-a81f-8ff46b934338 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 880.187841] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.192578] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293057, 'name': CreateVM_Task, 'duration_secs': 0.31984} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.192769] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 880.193588] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 880.193765] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.194118] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 880.194429] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29345894-5a0b-4e3d-b95e-d27cd95b7aba {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.199301] env[61852]: DEBUG oslo_vmware.api [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for
the task: (returnval){ [ 880.199301] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5268bc75-039f-68af-cc2f-55277ee2674a" [ 880.199301] env[61852]: _type = "Task" [ 880.199301] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.208246] env[61852]: DEBUG oslo_vmware.api [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5268bc75-039f-68af-cc2f-55277ee2674a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.317854] env[61852]: DEBUG oslo_vmware.api [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293059, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.199202} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.317854] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 880.317854] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 880.317854] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 880.317854] env[61852]: INFO nova.compute.manager [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Took 1.12 seconds to destroy the instance on the hypervisor. [ 880.317854] env[61852]: DEBUG oslo.service.loopingcall [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 880.317854] env[61852]: DEBUG nova.compute.manager [-] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 880.317854] env[61852]: DEBUG nova.network.neutron [-] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 880.328695] env[61852]: DEBUG nova.compute.manager [req-1329b7b8-fef7-44e2-aae4-8b7d25203964 req-c6917835-b71f-4eed-8b7e-fb48383edaaa service nova] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Received event network-vif-plugged-3377bf3e-e158-4caa-a81f-8ff46b934338 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 880.329053] env[61852]: DEBUG oslo_concurrency.lockutils [req-1329b7b8-fef7-44e2-aae4-8b7d25203964 req-c6917835-b71f-4eed-8b7e-fb48383edaaa service nova] Acquiring lock "12e431d3-4c23-4f4c-a619-f0b69a0e31e8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.329556] env[61852]: DEBUG oslo_concurrency.lockutils [req-1329b7b8-fef7-44e2-aae4-8b7d25203964 req-c6917835-b71f-4eed-8b7e-fb48383edaaa service nova] Lock "12e431d3-4c23-4f4c-a619-f0b69a0e31e8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.329833] env[61852]: DEBUG oslo_concurrency.lockutils [req-1329b7b8-fef7-44e2-aae4-8b7d25203964 req-c6917835-b71f-4eed-8b7e-fb48383edaaa service nova] Lock "12e431d3-4c23-4f4c-a619-f0b69a0e31e8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.330096] env[61852]: DEBUG nova.compute.manager [req-1329b7b8-fef7-44e2-aae4-8b7d25203964 req-c6917835-b71f-4eed-8b7e-fb48383edaaa service nova] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] No waiting events found dispatching network-vif-plugged-3377bf3e-e158-4caa-a81f-8ff46b934338 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 880.331024] env[61852]: WARNING nova.compute.manager [req-1329b7b8-fef7-44e2-aae4-8b7d25203964 req-c6917835-b71f-4eed-8b7e-fb48383edaaa service nova] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Received unexpected event network-vif-plugged-3377bf3e-e158-4caa-a81f-8ff46b934338 for instance with vm_state building and task_state spawning. [ 880.331024] env[61852]: DEBUG nova.compute.manager [req-1329b7b8-fef7-44e2-aae4-8b7d25203964 req-c6917835-b71f-4eed-8b7e-fb48383edaaa service nova] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Received event network-changed-3377bf3e-e158-4caa-a81f-8ff46b934338 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 880.331024] env[61852]: DEBUG nova.compute.manager [req-1329b7b8-fef7-44e2-aae4-8b7d25203964 req-c6917835-b71f-4eed-8b7e-fb48383edaaa service nova] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Refreshing instance network info cache due to event network-changed-3377bf3e-e158-4caa-a81f-8ff46b934338.
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 880.331240] env[61852]: DEBUG oslo_concurrency.lockutils [req-1329b7b8-fef7-44e2-aae4-8b7d25203964 req-c6917835-b71f-4eed-8b7e-fb48383edaaa service nova] Acquiring lock "refresh_cache-12e431d3-4c23-4f4c-a619-f0b69a0e31e8" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 880.331441] env[61852]: DEBUG oslo_concurrency.lockutils [req-1329b7b8-fef7-44e2-aae4-8b7d25203964 req-c6917835-b71f-4eed-8b7e-fb48383edaaa service nova] Acquired lock "refresh_cache-12e431d3-4c23-4f4c-a619-f0b69a0e31e8" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.331733] env[61852]: DEBUG nova.network.neutron [req-1329b7b8-fef7-44e2-aae4-8b7d25203964 req-c6917835-b71f-4eed-8b7e-fb48383edaaa service nova] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Refreshing network info cache for port 3377bf3e-e158-4caa-a81f-8ff46b934338 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 880.357856] env[61852]: DEBUG oslo_vmware.api [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]526d8520-17a8-017c-c05b-2511cd0633b6, 'name': SearchDatastore_Task, 'duration_secs': 0.009232} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.358595] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.358595] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] eae1ad1f-f213-4227-93aa-b0ccf660e638/eae1ad1f-f213-4227-93aa-b0ccf660e638.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 880.358721] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d4d57c7f-fd4c-4011-8783-bd66d61dc224 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.365961] env[61852]: DEBUG oslo_vmware.api [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Waiting for the task: (returnval){ [ 880.365961] env[61852]: value = "task-1293061" [ 880.365961] env[61852]: _type = "Task" [ 880.365961] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.373876] env[61852]: DEBUG oslo_vmware.api [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': task-1293061, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.588259] env[61852]: DEBUG oslo_vmware.api [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293060, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.657018] env[61852]: DEBUG oslo_concurrency.lockutils [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquiring lock "refresh_cache-12e431d3-4c23-4f4c-a619-f0b69a0e31e8" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 880.714597] env[61852]: DEBUG oslo_vmware.api [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5268bc75-039f-68af-cc2f-55277ee2674a, 'name': SearchDatastore_Task, 'duration_secs': 0.009276} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.715039] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.715309] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 880.715550] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 880.715710] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.715931] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 880.716261] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d69cb06f-cef1-4c01-80b9-13da261c27c4 {{(pid=61852) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.736065] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 880.736316] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 880.740106] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61cba1af-b262-4893-b688-6f4ce2e9d21b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.745651] env[61852]: DEBUG oslo_vmware.api [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 880.745651] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]527602e9-d3db-2758-d961-5be0e8ed3468" [ 880.745651] env[61852]: _type = "Task" [ 880.745651] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.760729] env[61852]: DEBUG oslo_vmware.api [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]527602e9-d3db-2758-d961-5be0e8ed3468, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.804839] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fd482b5-1c70-48fa-bb25-062aba7ada93 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.815230] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1186d168-ce78-4fbd-9c5e-9c751a921129 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.850751] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f68f02a2-9d63-4049-8f29-7201eb1be001 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.861619] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7cc73d6-e06d-4a90-8580-9c0d0ff3ae2a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.881391] env[61852]: DEBUG nova.compute.provider_tree [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 880.887172] env[61852]: DEBUG oslo_vmware.api [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': task-1293061, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.486777} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.888272] env[61852]: DEBUG nova.network.neutron [req-1329b7b8-fef7-44e2-aae4-8b7d25203964 req-c6917835-b71f-4eed-8b7e-fb48383edaaa service nova] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 880.889998] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] eae1ad1f-f213-4227-93aa-b0ccf660e638/eae1ad1f-f213-4227-93aa-b0ccf660e638.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 880.890229] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 880.891172] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bcc238a8-90c1-444a-8be7-ed1c2b77b13f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.897573] env[61852]: DEBUG oslo_vmware.api [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Waiting for the task: (returnval){ [ 880.897573] env[61852]: value = "task-1293062" [ 880.897573] env[61852]: _type = "Task" [ 880.897573] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.905838] env[61852]: DEBUG oslo_vmware.api [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': task-1293062, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.974689] env[61852]: DEBUG nova.network.neutron [req-1329b7b8-fef7-44e2-aae4-8b7d25203964 req-c6917835-b71f-4eed-8b7e-fb48383edaaa service nova] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.062192] env[61852]: DEBUG nova.network.neutron [-] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.085636] env[61852]: DEBUG oslo_vmware.api [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293060, 'name': RemoveSnapshot_Task, 'duration_secs': 0.676331} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.085971] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Deleted Snapshot of the VM instance {{(pid=61852) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 881.086294] env[61852]: INFO nova.compute.manager [None req-9a03eb78-ee48-4f4b-9395-65deca381e5b tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Took 15.72 seconds to snapshot the instance on the hypervisor. [ 881.260798] env[61852]: DEBUG oslo_vmware.api [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]527602e9-d3db-2758-d961-5be0e8ed3468, 'name': SearchDatastore_Task, 'duration_secs': 0.054132} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.261589] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb91a4be-2271-43dd-9cea-1f6ad4c74d43 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.266607] env[61852]: DEBUG oslo_vmware.api [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 881.266607] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]522a7c6e-ea70-3a1a-cbff-14a59a4dada5" [ 881.266607] env[61852]: _type = "Task" [ 881.266607] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.276737] env[61852]: DEBUG oslo_vmware.api [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]522a7c6e-ea70-3a1a-cbff-14a59a4dada5, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.276967] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 881.277245] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92/4b85f2d7-d99a-4332-a78c-3f2a50c7cb92.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 881.277479] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-475eed05-8cd8-4e82-bacf-24e79dfc46e3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.283479] env[61852]: DEBUG oslo_vmware.api [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 881.283479] env[61852]: value = "task-1293063" [ 881.283479] env[61852]: _type = "Task" [ 881.283479] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.290667] env[61852]: DEBUG oslo_vmware.api [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293063, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.385330] env[61852]: DEBUG nova.scheduler.client.report [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 881.407191] env[61852]: DEBUG oslo_vmware.api [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': task-1293062, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069566} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.413377] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 881.413377] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fda53e7-49c1-486e-804c-b7f9bdbf6379 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.440266] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Reconfiguring VM instance instance-0000004d to attach disk [datastore2] eae1ad1f-f213-4227-93aa-b0ccf660e638/eae1ad1f-f213-4227-93aa-b0ccf660e638.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 881.440631] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a4f17344-d754-4f32-8820-e53a8f5ec848 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.461391] env[61852]: DEBUG oslo_vmware.api [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Waiting for the task: (returnval){ [ 881.461391] env[61852]: value = "task-1293064" [ 881.461391] env[61852]: _type = "Task" [ 881.461391] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.471902] env[61852]: DEBUG oslo_vmware.api [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': task-1293064, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.477665] env[61852]: DEBUG oslo_concurrency.lockutils [req-1329b7b8-fef7-44e2-aae4-8b7d25203964 req-c6917835-b71f-4eed-8b7e-fb48383edaaa service nova] Releasing lock "refresh_cache-12e431d3-4c23-4f4c-a619-f0b69a0e31e8" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 881.478083] env[61852]: DEBUG oslo_concurrency.lockutils [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquired lock "refresh_cache-12e431d3-4c23-4f4c-a619-f0b69a0e31e8" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.478311] env[61852]: DEBUG nova.network.neutron [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 881.557642] env[61852]: DEBUG oslo_concurrency.lockutils [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "6cb1968c-b951-4a83-a036-ba50b735133c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.557901] env[61852]: DEBUG oslo_concurrency.lockutils [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "6cb1968c-b951-4a83-a036-ba50b735133c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.565577] env[61852]: INFO nova.compute.manager [-] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Took 1.25 seconds to deallocate network for instance. [ 881.793823] env[61852]: DEBUG oslo_vmware.api [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293063, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.46395} completed successfully.
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.794167] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92/4b85f2d7-d99a-4332-a78c-3f2a50c7cb92.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 881.794348] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 881.794715] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-076ee9b1-0f30-4d11-932d-7ad2d2726723 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.800898] env[61852]: DEBUG oslo_vmware.api [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 881.800898] env[61852]: value = "task-1293065" [ 881.800898] env[61852]: _type = "Task" [ 881.800898] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.808841] env[61852]: DEBUG oslo_vmware.api [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293065, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.891427] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.332s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.892319] env[61852]: DEBUG nova.compute.manager [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 881.895918] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.603s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.896171] env[61852]: DEBUG nova.objects.instance [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lazy-loading 'resources' on Instance uuid f8ebb1b7-39c6-486e-ab25-23080d858846 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 881.971484] env[61852]: DEBUG oslo_vmware.api [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': task-1293064, 'name': ReconfigVM_Task, 'duration_secs': 0.431993} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.971806] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Reconfigured VM instance instance-0000004d to attach disk [datastore2] eae1ad1f-f213-4227-93aa-b0ccf660e638/eae1ad1f-f213-4227-93aa-b0ccf660e638.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 881.972475] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-771a1704-f688-41a0-8562-669722edf217 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.978570] env[61852]: DEBUG oslo_vmware.api [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Waiting for the task: (returnval){ [ 881.978570] env[61852]: value = "task-1293066" [ 881.978570] env[61852]: _type = "Task" [ 881.978570] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.987884] env[61852]: DEBUG oslo_vmware.api [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': task-1293066, 'name': Rename_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.028229] env[61852]: DEBUG nova.network.neutron [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 882.062117] env[61852]: DEBUG nova.compute.manager [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 882.072198] env[61852]: DEBUG oslo_concurrency.lockutils [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.181533] env[61852]: DEBUG nova.network.neutron [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Updating instance_info_cache with network_info: [{"id": "3377bf3e-e158-4caa-a81f-8ff46b934338", "address": "fa:16:3e:e6:0c:dd", "network": {"id": "d8dfb48f-1d4c-40ca-a2c0-27b808516657", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-603860889-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fdd2d4aeb954b6fae049090b32f657b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d48f0ef6-34e5-44d4-8baf-4470ed96ce73", "external-id": "nsx-vlan-transportzone-316", "segmentation_id": 316, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3377bf3e-e1", "ovs_interfaceid": "3377bf3e-e158-4caa-a81f-8ff46b934338", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.312496] env[61852]: DEBUG oslo_vmware.api [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293065, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067143} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.312772] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 882.313571] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e73fdc31-de60-4110-bb43-5da4f9a714a9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.342633] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Reconfiguring VM instance instance-0000004c to attach disk [datastore2] 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92/4b85f2d7-d99a-4332-a78c-3f2a50c7cb92.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 882.342633] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c310ad2-8472-4a6e-887b-10e6888a027c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.364664] env[61852]: DEBUG nova.compute.manager [req-6b0c9ee8-584c-4426-b301-c4cda3b22b20 req-b89e7172-70d2-429f-bcf7-7ca8f133d706 service nova] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Received event network-vif-deleted-9444dd57-04ba-4f44-8080-68ec800cc9b6 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 882.366916] env[61852]: DEBUG oslo_vmware.api [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 882.366916] env[61852]: value = "task-1293067" [ 882.366916] env[61852]: _type = "Task" [ 882.366916] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.377207] env[61852]: DEBUG oslo_vmware.api [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293067, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.398644] env[61852]: DEBUG nova.compute.utils [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 882.400082] env[61852]: DEBUG nova.compute.manager [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 882.400182] env[61852]: DEBUG nova.network.neutron [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 882.472346] env[61852]: DEBUG nova.policy [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd1349b8262e345068742af657fa8cbd0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4dbb543c66364861bf5f437c8c33a550', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 882.487817] env[61852]: DEBUG oslo_vmware.api [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': task-1293066, 'name': Rename_Task, 'duration_secs': 0.132772} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.488935] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 882.488935] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-947745ce-794c-4c53-b220-5ff700d8264f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.499018] env[61852]: DEBUG oslo_vmware.api [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Waiting for the task: (returnval){ [ 882.499018] env[61852]: value = "task-1293068" [ 882.499018] env[61852]: _type = "Task" [ 882.499018] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.506937] env[61852]: DEBUG oslo_vmware.api [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': task-1293068, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.579102] env[61852]: DEBUG oslo_concurrency.lockutils [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.648408] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe144f92-c552-412b-a365-3586d8ea1dd1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.656272] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be4de48e-ebb1-41b7-a4b1-d0907c1750eb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.686101] env[61852]: DEBUG oslo_concurrency.lockutils [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Releasing lock "refresh_cache-12e431d3-4c23-4f4c-a619-f0b69a0e31e8" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.686416] env[61852]: DEBUG nova.compute.manager [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Instance network_info: |[{"id": "3377bf3e-e158-4caa-a81f-8ff46b934338", "address": "fa:16:3e:e6:0c:dd", "network": {"id": "d8dfb48f-1d4c-40ca-a2c0-27b808516657", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-603860889-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fdd2d4aeb954b6fae049090b32f657b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d48f0ef6-34e5-44d4-8baf-4470ed96ce73", "external-id": "nsx-vlan-transportzone-316", "segmentation_id": 316, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3377bf3e-e1", "ovs_interfaceid": "3377bf3e-e158-4caa-a81f-8ff46b934338", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 882.687057] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:0c:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd48f0ef6-34e5-44d4-8baf-4470ed96ce73', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3377bf3e-e158-4caa-a81f-8ff46b934338', 'vif_model': 'vmxnet3'}] {{(pid=61852) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 882.694639] env[61852]: DEBUG oslo.service.loopingcall [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 882.695325] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0a6a304-80ea-4ed0-aa71-7742cbdff614 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.697870] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 882.698173] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-234019c8-f079-4a68-8905-804ba6a42f31 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.718026] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549592ca-0bed-4123-8454-63bb63522e4d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.723374] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 882.723374] env[61852]: value = "task-1293069" [ 882.723374] env[61852]: _type = "Task" [ 882.723374] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.734088] env[61852]: DEBUG nova.compute.provider_tree [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 882.739725] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293069, 'name': CreateVM_Task} progress is 10%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.876900] env[61852]: DEBUG oslo_vmware.api [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293067, 'name': ReconfigVM_Task, 'duration_secs': 0.275178} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.877588] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Reconfigured VM instance instance-0000004c to attach disk [datastore2] 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92/4b85f2d7-d99a-4332-a78c-3f2a50c7cb92.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 882.878254] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0357a031-beed-4d88-ad10-dca39ffdd246 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.884809] env[61852]: DEBUG oslo_vmware.api [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 882.884809] env[61852]: value = "task-1293070" [ 882.884809] env[61852]: _type = "Task" [ 882.884809] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.895017] env[61852]: DEBUG oslo_vmware.api [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293070, 'name': Rename_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.902181] env[61852]: DEBUG nova.network.neutron [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Successfully created port: 5dd12202-d332-4603-a2aa-3406fc1413b6 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 882.905729] env[61852]: DEBUG nova.compute.manager [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 883.010426] env[61852]: DEBUG oslo_vmware.api [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': task-1293068, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.233871] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293069, 'name': CreateVM_Task, 'duration_secs': 0.354012} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.234067] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 883.234807] env[61852]: DEBUG oslo_concurrency.lockutils [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.234989] env[61852]: DEBUG oslo_concurrency.lockutils [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.235346] env[61852]: DEBUG oslo_concurrency.lockutils [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 883.235613] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e04e29f-764c-42de-bdf4-462098d5a5dd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.237823] env[61852]: DEBUG nova.scheduler.client.report [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 883.246133] env[61852]: DEBUG oslo_vmware.api [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){ [ 883.246133] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52e84371-4e22-9b07-2617-6546248b8782" [ 883.246133] env[61852]: _type = "Task" [ 883.246133] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.252265] env[61852]: DEBUG oslo_vmware.api [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52e84371-4e22-9b07-2617-6546248b8782, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.396433] env[61852]: DEBUG oslo_vmware.api [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293070, 'name': Rename_Task, 'duration_secs': 0.147992} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.396433] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 883.397013] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9cec5d64-5106-4bc2-8ee7-6e7adff24fe1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.406016] env[61852]: DEBUG oslo_vmware.api [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 883.406016] env[61852]: value = "task-1293071" [ 883.406016] env[61852]: _type = "Task" [ 883.406016] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.419232] env[61852]: DEBUG oslo_vmware.api [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293071, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.508475] env[61852]: DEBUG oslo_vmware.api [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': task-1293068, 'name': PowerOnVM_Task, 'duration_secs': 0.607627} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.508634] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 883.508812] env[61852]: INFO nova.compute.manager [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Took 10.19 seconds to spawn the instance on the hypervisor. 
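The wait_for_task/_poll_task pairs above illustrate how the VMware driver handles asynchronous vCenter operations: every call that returns a Task object (CreateVM_Task, Rename_Task, PowerOnVM_Task) is polled until it reaches a terminal state, emitting the "progress is N%" records along the way. A minimal sketch of that loop, assuming a hypothetical get_task_info() callable in place of oslo.vmware's real session plumbing:

    import time

    class TaskFailed(Exception):
        """Raised when a vCenter-style task ends in an error state."""

    def wait_for_task(get_task_info, task_id, interval=0.5):
        """Poll a task until it completes.

        `get_task_info` is a hypothetical callable returning a dict like
        {'state': 'running'|'success'|'error', 'progress': int}; it is a
        stand-in, not oslo.vmware's actual API.
        """
        while True:
            info = get_task_info(task_id)
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise TaskFailed(info.get('error', 'unknown error'))
            # Corresponds to the "progress is N%" records in the log.
            print(f"Task {task_id} progress is {info.get('progress', 0)}%")
            time.sleep(interval)

Judging by the oslo.service.loopingcall record above ("Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return"), the production loop runs under oslo.service's looping-call machinery rather than a bare sleep loop, but the poll-until-terminal shape is the same.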
[ 883.508994] env[61852]: DEBUG nova.compute.manager [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 883.509944] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abfc0a86-5124-49c9-8c68-4b464819c7a7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.742469] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.846s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.745012] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.408s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.745487] env[61852]: DEBUG nova.objects.instance [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lazy-loading 'resources' on Instance uuid 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 883.763219] env[61852]: DEBUG oslo_vmware.api [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52e84371-4e22-9b07-2617-6546248b8782, 'name': SearchDatastore_Task, 'duration_secs': 0.009573} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.763674] env[61852]: DEBUG oslo_concurrency.lockutils [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.763838] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 883.764045] env[61852]: DEBUG oslo_concurrency.lockutils [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.764591] env[61852]: DEBUG oslo_concurrency.lockutils [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.764591] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 883.764707] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8d7a67b0-50a5-451e-95b5-e45bad3d8dc1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.782514] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 883.782913] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 883.783765] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9eef598-9d7a-481c-b5d4-9659f1d55811 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.793407] env[61852]: INFO nova.scheduler.client.report [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Deleted allocations for instance f8ebb1b7-39c6-486e-ab25-23080d858846 [ 883.800762] env[61852]: DEBUG oslo_vmware.api [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){ [ 883.800762] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5244c9d3-b021-64b4-9ca2-4c63b1817ba1" [ 883.800762] env[61852]: _type = "Task" [ 883.800762] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.809370] env[61852]: DEBUG oslo_vmware.api [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5244c9d3-b021-64b4-9ca2-4c63b1817ba1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.914135] env[61852]: DEBUG oslo_vmware.api [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293071, 'name': PowerOnVM_Task} progress is 99%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.920894] env[61852]: DEBUG nova.compute.manager [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 883.948120] env[61852]: DEBUG nova.virt.hardware [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 883.948120] env[61852]: DEBUG nova.virt.hardware [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 883.948120] env[61852]: DEBUG nova.virt.hardware [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 883.948120] env[61852]: DEBUG nova.virt.hardware [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 883.948120] env[61852]: DEBUG nova.virt.hardware [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 883.948120] env[61852]: DEBUG nova.virt.hardware [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 883.948120] env[61852]: DEBUG nova.virt.hardware [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 883.948120] env[61852]: DEBUG nova.virt.hardware [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 883.950070] env[61852]: DEBUG nova.virt.hardware [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] 
Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 883.950580] env[61852]: DEBUG nova.virt.hardware [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 883.951038] env[61852]: DEBUG nova.virt.hardware [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 883.952228] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0fc25fa-6e59-432d-ba3a-d4871e57b70c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.962405] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-630b5fc6-28ef-4645-bcd8-673576b1bf49 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.026025] env[61852]: INFO nova.compute.manager [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Took 28.87 seconds to build instance. [ 884.316014] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d5882d66-6165-4b06-9661-100e41f3b59c tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lock "f8ebb1b7-39c6-486e-ab25-23080d858846" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.938s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.327686] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Acquiring lock "eae1ad1f-f213-4227-93aa-b0ccf660e638" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.328033] env[61852]: DEBUG oslo_vmware.api [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5244c9d3-b021-64b4-9ca2-4c63b1817ba1, 'name': SearchDatastore_Task, 'duration_secs': 0.01281} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.328993] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-098dab9e-1c9c-4e21-b3e2-d79497b5afbd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.334660] env[61852]: DEBUG oslo_vmware.api [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){ [ 884.334660] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52c9d48b-e40f-b218-9d85-3b41eabe78e9" [ 884.334660] env[61852]: _type = "Task" [ 884.334660] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.346023] env[61852]: DEBUG oslo_vmware.api [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52c9d48b-e40f-b218-9d85-3b41eabe78e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.417919] env[61852]: DEBUG oslo_vmware.api [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293071, 'name': PowerOnVM_Task, 'duration_secs': 0.674931} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.421171] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 884.421865] env[61852]: DEBUG nova.compute.manager [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 884.423246] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae08a3ed-307e-4b5b-b0f9-d6a02c3a3305 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.430642] env[61852]: DEBUG nova.network.neutron [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Successfully updated port: 5dd12202-d332-4603-a2aa-3406fc1413b6 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 884.449723] env[61852]: DEBUG nova.compute.manager [req-d55609e1-2416-425e-83ac-38cc9fdb98a7 req-3ca4b54e-c97c-4635-becf-82ef37d104e1 service nova] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Received event network-vif-plugged-5dd12202-d332-4603-a2aa-3406fc1413b6 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 884.449888] env[61852]: DEBUG oslo_concurrency.lockutils 
[req-d55609e1-2416-425e-83ac-38cc9fdb98a7 req-3ca4b54e-c97c-4635-becf-82ef37d104e1 service nova] Acquiring lock "b44c9cc0-3f2b-495a-87ee-f03de8dcec3c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.450340] env[61852]: DEBUG oslo_concurrency.lockutils [req-d55609e1-2416-425e-83ac-38cc9fdb98a7 req-3ca4b54e-c97c-4635-becf-82ef37d104e1 service nova] Lock "b44c9cc0-3f2b-495a-87ee-f03de8dcec3c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.450340] env[61852]: DEBUG oslo_concurrency.lockutils [req-d55609e1-2416-425e-83ac-38cc9fdb98a7 req-3ca4b54e-c97c-4635-becf-82ef37d104e1 service nova] Lock "b44c9cc0-3f2b-495a-87ee-f03de8dcec3c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.450464] env[61852]: DEBUG nova.compute.manager [req-d55609e1-2416-425e-83ac-38cc9fdb98a7 req-3ca4b54e-c97c-4635-becf-82ef37d104e1 service nova] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] No waiting events found dispatching network-vif-plugged-5dd12202-d332-4603-a2aa-3406fc1413b6 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 884.451194] env[61852]: WARNING nova.compute.manager [req-d55609e1-2416-425e-83ac-38cc9fdb98a7 req-3ca4b54e-c97c-4635-becf-82ef37d104e1 service nova] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Received unexpected event network-vif-plugged-5dd12202-d332-4603-a2aa-3406fc1413b6 for instance with vm_state building and task_state spawning. 
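The network-vif-plugged sequence above shows Nova's external-event handshake: when Neutron reports a port as plugged, the compute manager takes the per-instance "<uuid>-events" lock, pops any registered waiter for that event, and logs the WARNING seen here when nothing is waiting yet (the instance is still building, so the spawn path has not registered its waiter). A toy model of that pop-or-warn pattern, using simplified names (InstanceEvents, prepare_for) that are not Nova's actual API:

    import threading

    class InstanceEvents:
        """Toy event table guarded by a lock, mirroring the
        "<uuid>-events" lock acquire/release records in the log."""

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}  # (instance_uuid, event_name) -> Event

        def prepare_for(self, instance_uuid, event_name):
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = ev
            return ev

        def pop_instance_event(self, instance_uuid, event_name):
            with self._lock:  # "Acquiring lock ... -events"
                return self._waiters.pop((instance_uuid, event_name), None)

    def external_instance_event(events, instance_uuid, event_name):
        waiter = events.pop_instance_event(instance_uuid, event_name)
        if waiter is None:
            # Corresponds to "Received unexpected event ..." above.
            print(f"unexpected event {event_name} for {instance_uuid}")
        else:
            waiter.set()  # unblocks a thread waiting in the spawn path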
[ 884.530754] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1cb433a1-507a-4d32-a7dc-896c729faa96 tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Lock "eae1ad1f-f213-4227-93aa-b0ccf660e638" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.395s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.530946] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Lock "eae1ad1f-f213-4227-93aa-b0ccf660e638" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.203s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.531958] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Acquiring lock "eae1ad1f-f213-4227-93aa-b0ccf660e638-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.532050] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Lock "eae1ad1f-f213-4227-93aa-b0ccf660e638-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.532202] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Lock "eae1ad1f-f213-4227-93aa-b0ccf660e638-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.534289] env[61852]: INFO nova.compute.manager [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Terminating instance [ 884.535965] env[61852]: DEBUG nova.compute.manager [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 884.536214] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 884.537129] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68cb9618-175e-4952-be73-545462e3c15d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.542407] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76073a38-0b2a-4e1b-aaf4-7cc82c3563f4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.549484] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 884.551365] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5556d28f-c732-4987-96da-722ae741eca0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.553486] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3102f128-1f06-449e-8e96-9bb8a9dd2806 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.586867] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6473a2d-653a-4cdd-bdf0-a79ef3681310 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.589488] env[61852]: DEBUG oslo_vmware.api [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Waiting for the task: (returnval){ [ 884.589488] env[61852]: value = "task-1293072" [ 884.589488] env[61852]: _type = "Task" [ 884.589488] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.596653] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69c9df40-51d1-4800-9db9-cd34d312a582 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.603080] env[61852]: DEBUG oslo_vmware.api [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': task-1293072, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.612739] env[61852]: DEBUG nova.compute.provider_tree [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 884.847068] env[61852]: DEBUG oslo_vmware.api [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52c9d48b-e40f-b218-9d85-3b41eabe78e9, 'name': SearchDatastore_Task, 'duration_secs': 0.009226} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.847380] env[61852]: DEBUG oslo_concurrency.lockutils [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.847645] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] 12e431d3-4c23-4f4c-a619-f0b69a0e31e8/12e431d3-4c23-4f4c-a619-f0b69a0e31e8.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 884.847905] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-53addc16-7c23-4b49-bcb6-410324c7d3a1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.854557] env[61852]: DEBUG oslo_vmware.api [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){ [ 884.854557] env[61852]: value = "task-1293073" [ 884.854557] env[61852]: _type = "Task" [ 884.854557] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.864590] env[61852]: DEBUG oslo_vmware.api [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293073, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.934041] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "refresh_cache-b44c9cc0-3f2b-495a-87ee-f03de8dcec3c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.934041] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquired lock "refresh_cache-b44c9cc0-3f2b-495a-87ee-f03de8dcec3c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.934041] env[61852]: DEBUG nova.network.neutron [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 884.951453] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.661747] env[61852]: DEBUG nova.scheduler.client.report [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 885.665097] env[61852]: DEBUG oslo_concurrency.lockutils [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "4b85f2d7-d99a-4332-a78c-3f2a50c7cb92" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.665364] env[61852]: DEBUG oslo_concurrency.lockutils [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "4b85f2d7-d99a-4332-a78c-3f2a50c7cb92" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.667020] env[61852]: DEBUG oslo_concurrency.lockutils [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock 
"4b85f2d7-d99a-4332-a78c-3f2a50c7cb92-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.667020] env[61852]: DEBUG oslo_concurrency.lockutils [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "4b85f2d7-d99a-4332-a78c-3f2a50c7cb92-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.667020] env[61852]: DEBUG oslo_concurrency.lockutils [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "4b85f2d7-d99a-4332-a78c-3f2a50c7cb92-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.675593] env[61852]: INFO nova.compute.manager [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Terminating instance [ 885.677581] env[61852]: DEBUG nova.compute.manager [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 885.677673] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 885.680298] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70acad51-04c3-49fb-a3cc-b378100267c3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.686404] env[61852]: DEBUG oslo_vmware.api [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': task-1293072, 'name': PowerOffVM_Task, 'duration_secs': 0.183667} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.691948] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 885.692101] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 885.692457] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 885.693470] env[61852]: DEBUG oslo_vmware.api [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293073, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.525587} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.693682] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d7e89255-3ef6-4311-9386-55c3987b9eb1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.695096] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-30c90559-a395-4df9-906e-2e47a9c3bd8e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.696414] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] 12e431d3-4c23-4f4c-a619-f0b69a0e31e8/12e431d3-4c23-4f4c-a619-f0b69a0e31e8.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 885.696617] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 885.697081] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4736b20f-dd30-49ba-bfdc-c1fae82dc5b8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.704255] env[61852]: DEBUG oslo_vmware.api [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 885.704255] env[61852]: value = 
"task-1293075" [ 885.704255] env[61852]: _type = "Task" [ 885.704255] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.705353] env[61852]: DEBUG oslo_vmware.api [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){ [ 885.705353] env[61852]: value = "task-1293076" [ 885.705353] env[61852]: _type = "Task" [ 885.705353] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.716138] env[61852]: DEBUG oslo_vmware.api [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293076, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.718983] env[61852]: DEBUG oslo_vmware.api [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293075, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.721075] env[61852]: DEBUG nova.network.neutron [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 885.859864] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 885.860227] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Deleting contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 885.860490] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Deleting the datastore file [datastore2] eae1ad1f-f213-4227-93aa-b0ccf660e638 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 885.861060] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-868eb358-0c60-4db3-accc-94eff9de62f9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.871860] env[61852]: DEBUG oslo_vmware.api [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Waiting for the task: (returnval){ [ 885.871860] env[61852]: value = "task-1293077" [ 885.871860] env[61852]: _type = "Task" [ 885.871860] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.880074] env[61852]: DEBUG oslo_vmware.api [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': task-1293077, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.905722] env[61852]: DEBUG nova.network.neutron [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Updating instance_info_cache with network_info: [{"id": "5dd12202-d332-4603-a2aa-3406fc1413b6", "address": "fa:16:3e:11:0c:0e", "network": {"id": "66e1ee36-559a-4219-ab11-b6c5d9aeb20e", "bridge": "br-int", "label": "tempest-ServersTestJSON-206536995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4dbb543c66364861bf5f437c8c33a550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2c424c9-6446-4b2a-af8c-4d9c29117c39", "external-id": "nsx-vlan-transportzone-437", "segmentation_id": 437, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5dd12202-d3", "ovs_interfaceid": "5dd12202-d332-4603-a2aa-3406fc1413b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.907617] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "51ecc9c3-a3fc-4bd7-8c90-003451700212" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.907857] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "51ecc9c3-a3fc-4bd7-8c90-003451700212" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.170816] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.426s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.174653] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 
tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.243s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.174923] env[61852]: DEBUG nova.objects.instance [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lazy-loading 'resources' on Instance uuid d3922357-383f-4f7e-9c76-4eb688a092b9 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 886.199641] env[61852]: INFO nova.scheduler.client.report [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Deleted allocations for instance 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c [ 886.218009] env[61852]: DEBUG oslo_vmware.api [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293075, 'name': PowerOffVM_Task, 'duration_secs': 0.187398} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.220777] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 886.220969] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 886.221345] env[61852]: DEBUG oslo_vmware.api [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293076, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065402} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.221613] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6a7cf52f-7e0f-45ea-b42b-a91c7e9629b7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.223192] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 886.224037] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e00d93a1-4df1-42bc-8255-e3ffd7a4f080 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.248562] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] 12e431d3-4c23-4f4c-a619-f0b69a0e31e8/12e431d3-4c23-4f4c-a619-f0b69a0e31e8.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 886.248872] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7db3582b-b977-4e41-a1ae-2a36a4949ad8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.269387] env[61852]: DEBUG oslo_vmware.api [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){ [ 886.269387] env[61852]: value = "task-1293079" [ 886.269387] env[61852]: _type = "Task" [ 886.269387] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.277784] env[61852]: DEBUG oslo_vmware.api [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293079, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.309072] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 886.309317] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Deleting contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 886.309525] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Deleting the datastore file [datastore2] 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 886.309850] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c743db30-e9d3-4844-ba6e-f44e9ebf9ca4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.316701] env[61852]: DEBUG oslo_vmware.api [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 886.316701] env[61852]: value = "task-1293080" [ 886.316701] env[61852]: _type = "Task" [ 886.316701] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.325431] env[61852]: DEBUG oslo_vmware.api [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293080, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.381706] env[61852]: DEBUG oslo_vmware.api [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Task: {'id': task-1293077, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.178129} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.382071] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 886.382273] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Deleted contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 886.382454] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 886.382631] env[61852]: INFO nova.compute.manager [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Took 1.85 seconds to destroy the instance on the hypervisor. [ 886.382879] env[61852]: DEBUG oslo.service.loopingcall [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 886.383229] env[61852]: DEBUG nova.compute.manager [-] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 886.383370] env[61852]: DEBUG nova.network.neutron [-] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 886.408502] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Releasing lock "refresh_cache-b44c9cc0-3f2b-495a-87ee-f03de8dcec3c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 886.412025] env[61852]: DEBUG nova.compute.manager [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Instance network_info: |[{"id": "5dd12202-d332-4603-a2aa-3406fc1413b6", "address": "fa:16:3e:11:0c:0e", "network": {"id": "66e1ee36-559a-4219-ab11-b6c5d9aeb20e", "bridge": "br-int", "label": "tempest-ServersTestJSON-206536995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4dbb543c66364861bf5f437c8c33a550", "mtu": 
8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2c424c9-6446-4b2a-af8c-4d9c29117c39", "external-id": "nsx-vlan-transportzone-437", "segmentation_id": 437, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5dd12202-d3", "ovs_interfaceid": "5dd12202-d332-4603-a2aa-3406fc1413b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 886.412025] env[61852]: DEBUG nova.compute.manager [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 886.412990] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:0c:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f2c424c9-6446-4b2a-af8c-4d9c29117c39', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5dd12202-d332-4603-a2aa-3406fc1413b6', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 886.426544] env[61852]: DEBUG oslo.service.loopingcall [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 886.427611] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 886.427998] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ee65a1fb-22e7-4330-b3d0-d8c216ff573e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.448549] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 886.448549] env[61852]: value = "task-1293081" [ 886.448549] env[61852]: _type = "Task" [ 886.448549] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.461145] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293081, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.489656] env[61852]: DEBUG nova.compute.manager [req-a0861ab5-7412-4edf-acfb-7098ece971d7 req-bce2c981-daeb-4134-ad3c-176293cff663 service nova] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Received event network-changed-5dd12202-d332-4603-a2aa-3406fc1413b6 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 886.489930] env[61852]: DEBUG nova.compute.manager [req-a0861ab5-7412-4edf-acfb-7098ece971d7 req-bce2c981-daeb-4134-ad3c-176293cff663 service nova] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Refreshing instance network info cache due to event network-changed-5dd12202-d332-4603-a2aa-3406fc1413b6. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 886.490209] env[61852]: DEBUG oslo_concurrency.lockutils [req-a0861ab5-7412-4edf-acfb-7098ece971d7 req-bce2c981-daeb-4134-ad3c-176293cff663 service nova] Acquiring lock "refresh_cache-b44c9cc0-3f2b-495a-87ee-f03de8dcec3c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.490396] env[61852]: DEBUG oslo_concurrency.lockutils [req-a0861ab5-7412-4edf-acfb-7098ece971d7 req-bce2c981-daeb-4134-ad3c-176293cff663 service nova] Acquired lock "refresh_cache-b44c9cc0-3f2b-495a-87ee-f03de8dcec3c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.490607] env[61852]: DEBUG nova.network.neutron [req-a0861ab5-7412-4edf-acfb-7098ece971d7 req-bce2c981-daeb-4134-ad3c-176293cff663 service nova] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Refreshing network info cache for port 5dd12202-d332-4603-a2aa-3406fc1413b6 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 886.710512] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c77f38ba-fdb2-49ad-b3b1-38c3bfc43b56 tempest-ListServersNegativeTestJSON-1453593109 tempest-ListServersNegativeTestJSON-1453593109-project-member] Lock "883a0d5a-f775-4ffc-abf0-921d0ea6cc8c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.268s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.779290] env[61852]: DEBUG oslo_vmware.api [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293079, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.826998] env[61852]: DEBUG oslo_vmware.api [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293080, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.321925} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.829838] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 886.830277] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Deleted contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 886.830714] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 886.830874] env[61852]: INFO nova.compute.manager [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Took 1.15 seconds to destroy the instance on the hypervisor. [ 886.831165] env[61852]: DEBUG oslo.service.loopingcall [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 886.832171] env[61852]: DEBUG nova.compute.manager [-] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 886.832332] env[61852]: DEBUG nova.network.neutron [-] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 886.939355] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-993613ff-eeeb-4a9d-bcd9-a83a62b652cd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.944256] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.947104] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6f855b0-24e9-49de-9444-9587ac82bfac {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.960109] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293081, 'name': CreateVM_Task, 'duration_secs': 0.374543} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.983678] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 886.984792] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.984986] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.985332] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 886.986112] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9a2f756-8a48-426d-8144-0cd56f7da66a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.988744] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1ae7eb5-5519-4b06-81b7-d0a9f15d7f07 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.998437] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfd767db-e78a-41fb-9641-60398d66c4a9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.002217] env[61852]: DEBUG oslo_vmware.api [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 887.002217] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52fec0d9-5288-3e7c-f434-e749fe6655ee" [ 887.002217] env[61852]: _type = "Task" [ 887.002217] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.013630] env[61852]: DEBUG nova.compute.provider_tree [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 887.020557] env[61852]: DEBUG oslo_vmware.api [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52fec0d9-5288-3e7c-f434-e749fe6655ee, 'name': SearchDatastore_Task, 'duration_secs': 0.012442} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.020716] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 887.020936] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 887.021188] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 887.021338] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.021520] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 887.021775] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ef1031ed-f493-4451-8033-76b405bff97a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.032327] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 887.032533] env[61852]: DEBUG nova.virt.vmwareapi.vmops 
[None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 887.033464] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a5e6cba-f363-4688-ba7d-f5efda5f98b6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.038894] env[61852]: DEBUG oslo_vmware.api [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 887.038894] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52377f8d-993e-7535-45cf-8ed5af31c398" [ 887.038894] env[61852]: _type = "Task" [ 887.038894] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.046415] env[61852]: DEBUG oslo_vmware.api [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52377f8d-993e-7535-45cf-8ed5af31c398, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.279914] env[61852]: DEBUG oslo_vmware.api [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293079, 'name': ReconfigVM_Task, 'duration_secs': 0.520209} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.280289] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Reconfigured VM instance instance-0000004e to attach disk [datastore2] 12e431d3-4c23-4f4c-a619-f0b69a0e31e8/12e431d3-4c23-4f4c-a619-f0b69a0e31e8.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 887.280869] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cb35c824-3142-4d4f-a92b-915f6f9599d7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.287561] env[61852]: DEBUG oslo_vmware.api [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){ [ 887.287561] env[61852]: value = "task-1293082" [ 887.287561] env[61852]: _type = "Task" [ 887.287561] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.297124] env[61852]: DEBUG oslo_vmware.api [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293082, 'name': Rename_Task} progress is 5%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.373742] env[61852]: DEBUG nova.network.neutron [-] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.421375] env[61852]: DEBUG nova.network.neutron [req-a0861ab5-7412-4edf-acfb-7098ece971d7 req-bce2c981-daeb-4134-ad3c-176293cff663 service nova] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Updated VIF entry in instance network info cache for port 5dd12202-d332-4603-a2aa-3406fc1413b6. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 887.421855] env[61852]: DEBUG nova.network.neutron [req-a0861ab5-7412-4edf-acfb-7098ece971d7 req-bce2c981-daeb-4134-ad3c-176293cff663 service nova] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Updating instance_info_cache with network_info: [{"id": "5dd12202-d332-4603-a2aa-3406fc1413b6", "address": "fa:16:3e:11:0c:0e", "network": {"id": "66e1ee36-559a-4219-ab11-b6c5d9aeb20e", "bridge": "br-int", "label": "tempest-ServersTestJSON-206536995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4dbb543c66364861bf5f437c8c33a550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2c424c9-6446-4b2a-af8c-4d9c29117c39", "external-id": "nsx-vlan-transportzone-437", "segmentation_id": 437, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5dd12202-d3", "ovs_interfaceid": "5dd12202-d332-4603-a2aa-3406fc1413b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.516349] env[61852]: DEBUG nova.scheduler.client.report [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 887.552975] env[61852]: DEBUG oslo_vmware.api [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52377f8d-993e-7535-45cf-8ed5af31c398, 'name': SearchDatastore_Task, 'duration_secs': 0.039637} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.553831] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eafb5cea-ccfd-42de-b97e-38b6c8cf3fce {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.561114] env[61852]: DEBUG oslo_vmware.api [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 887.561114] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529a0cde-a128-f394-0c7d-7c55ef7c96a3" [ 887.561114] env[61852]: _type = "Task" [ 887.561114] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.569167] env[61852]: DEBUG oslo_vmware.api [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529a0cde-a128-f394-0c7d-7c55ef7c96a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.625409] env[61852]: DEBUG nova.compute.manager [req-c507923a-41dd-4664-9ed9-80139a48a7a3 req-fde37474-0ee5-4eac-bda9-431376e47a9a service nova] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Received event network-vif-deleted-36d2d052-6d80-4ad5-bb1b-2c54679bd05b {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 887.625696] env[61852]: INFO nova.compute.manager [req-c507923a-41dd-4664-9ed9-80139a48a7a3 req-fde37474-0ee5-4eac-bda9-431376e47a9a service nova] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Neutron deleted interface 36d2d052-6d80-4ad5-bb1b-2c54679bd05b; detaching it from the instance and deleting it from the info cache [ 887.625929] env[61852]: DEBUG nova.network.neutron [req-c507923a-41dd-4664-9ed9-80139a48a7a3 req-fde37474-0ee5-4eac-bda9-431376e47a9a service nova] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.803429] env[61852]: DEBUG oslo_vmware.api [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293082, 'name': Rename_Task, 'duration_secs': 0.177719} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.804173] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 887.806517] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-19fc445e-b49d-4867-b80d-475ece33a08b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.814555] env[61852]: DEBUG oslo_vmware.api [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){ [ 887.814555] env[61852]: value = "task-1293083" [ 887.814555] env[61852]: _type = "Task" [ 887.814555] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.829409] env[61852]: DEBUG oslo_vmware.api [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293083, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.876821] env[61852]: INFO nova.compute.manager [-] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Took 1.49 seconds to deallocate network for instance. [ 887.926927] env[61852]: DEBUG oslo_concurrency.lockutils [req-a0861ab5-7412-4edf-acfb-7098ece971d7 req-bce2c981-daeb-4134-ad3c-176293cff663 service nova] Releasing lock "refresh_cache-b44c9cc0-3f2b-495a-87ee-f03de8dcec3c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.023165] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.849s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.028279] env[61852]: DEBUG oslo_concurrency.lockutils [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.897s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.028279] env[61852]: DEBUG nova.objects.instance [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Lazy-loading 'resources' on Instance uuid 8897a654-6805-45b0-b12b-16f7981d33ad {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 888.058402] env[61852]: INFO nova.scheduler.client.report [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Deleted allocations for instance d3922357-383f-4f7e-9c76-4eb688a092b9 [ 888.079247] 
env[61852]: DEBUG nova.network.neutron [-] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.080531] env[61852]: DEBUG oslo_vmware.api [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529a0cde-a128-f394-0c7d-7c55ef7c96a3, 'name': SearchDatastore_Task, 'duration_secs': 0.00955} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.081022] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.081825] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] b44c9cc0-3f2b-495a-87ee-f03de8dcec3c/b44c9cc0-3f2b-495a-87ee-f03de8dcec3c.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 888.086521] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c6c50d5a-9f4b-4739-80f4-1ecc1dd2f5d4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.096183] env[61852]: DEBUG oslo_vmware.api [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 888.096183] env[61852]: value = "task-1293084" [ 888.096183] env[61852]: _type = "Task" [ 888.096183] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.107296] env[61852]: DEBUG oslo_vmware.api [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293084, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.132146] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f1098641-a868-4bb5-8ef6-abd6ac920233 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.140172] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab0e7ef5-7e5c-458a-8e2e-6fbe084381be {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.170689] env[61852]: DEBUG nova.compute.manager [req-c507923a-41dd-4664-9ed9-80139a48a7a3 req-fde37474-0ee5-4eac-bda9-431376e47a9a service nova] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Detach interface failed, port_id=36d2d052-6d80-4ad5-bb1b-2c54679bd05b, reason: Instance 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92 could not be found. {{(pid=61852) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 888.326978] env[61852]: DEBUG oslo_vmware.api [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293083, 'name': PowerOnVM_Task} progress is 90%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.388431] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.569257] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ef387a0b-d2d8-40d1-bea9-c5cf7e21da51 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "d3922357-383f-4f7e-9c76-4eb688a092b9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.839s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.570601] env[61852]: DEBUG oslo_concurrency.lockutils [req-9bae8412-8bbb-4af3-8421-c7668264e309 req-09a5ad18-b157-41bb-baeb-8b727aae007a service nova] Acquired lock "d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.571759] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d7910b6-dc21-4c2e-95ee-45014b60c574 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.585927] env[61852]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 888.586193] env[61852]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=61852) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 888.586666] env[61852]: INFO nova.compute.manager [-] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Took 1.75 seconds to deallocate network for instance. 
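
Nearly every operation above follows the same asynchronous pattern: a SOAP method ending in _Task (CreateVM_Task, ReconfigVM_Task, PowerOnVM_Task, DeleteDatastoreFile_Task) returns a task reference immediately, and the driver then blocks in wait_for_task, which produces the "Waiting for the task: (returnval){...}" and "_poll_task ... progress is N%" lines. A minimal sketch of that pattern, assuming an oslo.vmware VMwareAPISession; the endpoint, credentials, and helper names here are hypothetical, while invoke_api and wait_for_task are the oslo.vmware calls these entries come from.

    from oslo_vmware import api

    def make_session():
        # Hypothetical endpoint and credentials; nova reads the real
        # values from the [vmware] section of nova.conf.
        return api.VMwareAPISession(
            'vc1.example.test', 'user', 'secret',
            api_retry_count=10, task_poll_interval=0.5)

    def power_on(session, vm_ref):
        # PowerOnVM_Task returns a Task moref at once; vCenter runs the
        # power-on asynchronously.
        task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task polls the task object (the "progress is N%"
        # DEBUG lines) and returns its result, raising if the task fails.
        return session.wait_for_task(task_ref)
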
[ 888.591070] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ce6f6f74-3775-46ed-a8b0-f19b09e4bf92 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.613151] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c9b78b7-49e3-4c2d-b779-e5c94e8268b1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.631538] env[61852]: DEBUG oslo_vmware.api [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293084, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489608} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.632382] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] b44c9cc0-3f2b-495a-87ee-f03de8dcec3c/b44c9cc0-3f2b-495a-87ee-f03de8dcec3c.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 888.632506] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 888.632816] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fb94966d-a139-4604-a128-2c11ed77948c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.637081] env[61852]: DEBUG nova.compute.manager [req-a50ab56d-a906-42cd-abd1-8eab24eea6a7 req-3921e569-d035-4872-a166-23bce3a68d1b service nova] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Received event network-vif-deleted-1fce3501-a013-4bf6-a413-f63b810e42d6 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 888.637628] env[61852]: DEBUG nova.compute.manager [req-a50ab56d-a906-42cd-abd1-8eab24eea6a7 req-3921e569-d035-4872-a166-23bce3a68d1b service nova] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Received event network-vif-deleted-c1ef5d89-7e6e-47bc-b2f8-b1d96731a3fa {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 888.648950] env[61852]: ERROR root [req-9bae8412-8bbb-4af3-8421-c7668264e309 req-09a5ad18-b157-41bb-baeb-8b727aae007a service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return 
self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-277306' has already been deleted or has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 480, in get_object_property\n props = get_object_properties(vim, moref, [property_name],\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-277306' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-277306' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-277306'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-277306' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-277306' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-277306'}\n"]: nova.exception.InstanceNotFound: Instance d3922357-383f-4f7e-9c76-4eb688a092b9 could not be found. 
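
The dropped-exception trace above shows the full translation chain: suds raises WebFault for the ManagedObjectNotFound fault (even though it arrived with HTTP status 200), oslo.vmware rewraps it first as VimFaultException and then as ManagedObjectNotFoundException, and nova finally reports InstanceNotFound because the moref vm-277306 no longer resolves. A minimal sketch of that last translation step, with a hypothetical helper name; the property lookup mirrors the get_object_property call in the traceback.

    from oslo_vmware import exceptions as vexc
    from oslo_vmware import vim_util

    from nova import exception

    def get_power_state(session, vm_ref, instance_uuid):
        try:
            # Same call path as the traceback: get_object_property issues
            # RetrievePropertiesEx against the VM moref.
            return session.invoke_api(
                vim_util, 'get_object_property',
                session.vim, vm_ref, 'runtime.powerState')
        except vexc.ManagedObjectNotFoundException:
            # The moref stops resolving once the VM is unregistered or
            # deleted; surface that as a nova-level InstanceNotFound.
            raise exception.InstanceNotFound(instance_id=instance_uuid)
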
[ 888.649252] env[61852]: DEBUG oslo_concurrency.lockutils [req-9bae8412-8bbb-4af3-8421-c7668264e309 req-09a5ad18-b157-41bb-baeb-8b727aae007a service nova] Releasing lock "d3922357-383f-4f7e-9c76-4eb688a092b9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.649496] env[61852]: DEBUG nova.compute.manager [req-9bae8412-8bbb-4af3-8421-c7668264e309 req-09a5ad18-b157-41bb-baeb-8b727aae007a service nova] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Detach interface failed, port_id=b3f3d9b5-9c27-4415-b02c-58c0b1133727, reason: Instance d3922357-383f-4f7e-9c76-4eb688a092b9 could not be found. {{(pid=61852) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 888.649702] env[61852]: DEBUG nova.compute.manager [req-9bae8412-8bbb-4af3-8421-c7668264e309 req-09a5ad18-b157-41bb-baeb-8b727aae007a service nova] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Received event network-changed-40eb747f-021a-4082-9f8d-70a6af6a415e {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 888.649886] env[61852]: DEBUG nova.compute.manager [req-9bae8412-8bbb-4af3-8421-c7668264e309 req-09a5ad18-b157-41bb-baeb-8b727aae007a service nova] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Refreshing instance network info cache due to event network-changed-40eb747f-021a-4082-9f8d-70a6af6a415e. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 888.650123] env[61852]: DEBUG oslo_concurrency.lockutils [req-9bae8412-8bbb-4af3-8421-c7668264e309 req-09a5ad18-b157-41bb-baeb-8b727aae007a service nova] Acquiring lock "refresh_cache-254919cb-e3cd-4288-8696-95e632d78a38" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.650398] env[61852]: DEBUG oslo_concurrency.lockutils [req-9bae8412-8bbb-4af3-8421-c7668264e309 req-09a5ad18-b157-41bb-baeb-8b727aae007a service nova] Acquired lock "refresh_cache-254919cb-e3cd-4288-8696-95e632d78a38" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.650468] env[61852]: DEBUG nova.network.neutron [req-9bae8412-8bbb-4af3-8421-c7668264e309 req-09a5ad18-b157-41bb-baeb-8b727aae007a service nova] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Refreshing network info cache for port 40eb747f-021a-4082-9f8d-70a6af6a415e {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 888.656193] env[61852]: DEBUG oslo_vmware.api [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 888.656193] env[61852]: value = "task-1293085" [ 888.656193] env[61852]: _type = "Task" [ 888.656193] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.666450] env[61852]: DEBUG oslo_vmware.api [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293085, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.825196] env[61852]: DEBUG oslo_vmware.api [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293083, 'name': PowerOnVM_Task, 'duration_secs': 1.003722} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.826518] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 888.826760] env[61852]: INFO nova.compute.manager [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Took 9.05 seconds to spawn the instance on the hypervisor. [ 888.826930] env[61852]: DEBUG nova.compute.manager [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 888.827750] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c40da6a3-86c1-4486-b14e-d65573eba259 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.831035] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf5965a4-a3db-482b-9ca6-f8a65e5691cd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.841917] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0fa2533-392b-4b43-a1d6-0071b236d1f3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.883973] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71d2be87-6ee1-4252-a754-b4c85d3a9e1e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.890438] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c600fac1-4981-4bf6-a576-eb6fe500ab35 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.904895] env[61852]: DEBUG nova.compute.provider_tree [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 889.108547] env[61852]: DEBUG oslo_concurrency.lockutils [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.176832] env[61852]: DEBUG oslo_vmware.api [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293085, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067095} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.177310] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 889.178544] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b561c73d-2205-4e9a-b92f-b724b4fc165a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.209543] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] b44c9cc0-3f2b-495a-87ee-f03de8dcec3c/b44c9cc0-3f2b-495a-87ee-f03de8dcec3c.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 889.210154] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-569b9af7-d0a2-4409-bf9d-64c6e99d6b12 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.236443] env[61852]: DEBUG oslo_vmware.api [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 889.236443] env[61852]: value = "task-1293086" [ 889.236443] env[61852]: _type = "Task" [ 889.236443] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.245523] env[61852]: DEBUG oslo_vmware.api [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293086, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.369058] env[61852]: INFO nova.compute.manager [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Took 28.44 seconds to build instance. 
[ 889.408395] env[61852]: DEBUG nova.scheduler.client.report [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 889.614023] env[61852]: DEBUG nova.network.neutron [req-9bae8412-8bbb-4af3-8421-c7668264e309 req-09a5ad18-b157-41bb-baeb-8b727aae007a service nova] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Updated VIF entry in instance network info cache for port 40eb747f-021a-4082-9f8d-70a6af6a415e. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 889.615296] env[61852]: DEBUG nova.network.neutron [req-9bae8412-8bbb-4af3-8421-c7668264e309 req-09a5ad18-b157-41bb-baeb-8b727aae007a service nova] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Updating instance_info_cache with network_info: [{"id": "40eb747f-021a-4082-9f8d-70a6af6a415e", "address": "fa:16:3e:5e:f8:1c", "network": {"id": "5c538b43-cd66-41dd-b7f8-8d7f49060f2f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1279580713-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "783bc6968c91488293479f10b8dc92c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3e0aae3-33d1-403b-bfaf-306f77a1422e", "external-id": "nsx-vlan-transportzone-211", "segmentation_id": 211, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40eb747f-02", "ovs_interfaceid": "40eb747f-021a-4082-9f8d-70a6af6a415e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 889.749960] env[61852]: DEBUG oslo_vmware.api [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293086, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 889.881159] env[61852]: DEBUG oslo_concurrency.lockutils [None req-88648ef3-e4d0-4c43-b0b5-0944807864ab tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lock "12e431d3-4c23-4f4c-a619-f0b69a0e31e8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.964s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 889.881564] env[61852]: DEBUG oslo_concurrency.lockutils [None req-16d10ee7-ec5f-45c2-9507-f8785a83d795 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquiring lock "12e431d3-4c23-4f4c-a619-f0b69a0e31e8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 889.881771] env[61852]: DEBUG oslo_concurrency.lockutils [None req-16d10ee7-ec5f-45c2-9507-f8785a83d795 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lock "12e431d3-4c23-4f4c-a619-f0b69a0e31e8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 889.882506] env[61852]: DEBUG oslo_concurrency.lockutils [None req-16d10ee7-ec5f-45c2-9507-f8785a83d795 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquiring lock "12e431d3-4c23-4f4c-a619-f0b69a0e31e8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 889.882506] env[61852]: DEBUG oslo_concurrency.lockutils [None req-16d10ee7-ec5f-45c2-9507-f8785a83d795 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lock "12e431d3-4c23-4f4c-a619-f0b69a0e31e8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 889.882506] env[61852]: DEBUG oslo_concurrency.lockutils [None req-16d10ee7-ec5f-45c2-9507-f8785a83d795 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lock "12e431d3-4c23-4f4c-a619-f0b69a0e31e8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 889.885219] env[61852]: INFO nova.compute.manager [None req-16d10ee7-ec5f-45c2-9507-f8785a83d795 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Terminating instance
[ 889.888467] env[61852]: DEBUG nova.compute.manager [None req-16d10ee7-ec5f-45c2-9507-f8785a83d795 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 889.888729] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-16d10ee7-ec5f-45c2-9507-f8785a83d795 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 889.889749] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e26c09e5-7de6-474c-8d6a-270689b41523 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 889.899124] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-16d10ee7-ec5f-45c2-9507-f8785a83d795 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 889.899581] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a7d9c45d-9eb5-4629-9f89-005ff6f31d86 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 889.906302] env[61852]: DEBUG oslo_vmware.api [None req-16d10ee7-ec5f-45c2-9507-f8785a83d795 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){
[ 889.906302] env[61852]: value = "task-1293087"
[ 889.906302] env[61852]: _type = "Task"
[ 889.906302] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 889.921934] env[61852]: DEBUG oslo_concurrency.lockutils [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.894s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 889.925122] env[61852]: DEBUG oslo_vmware.api [None req-16d10ee7-ec5f-45c2-9507-f8785a83d795 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293087, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 889.926699] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 13.856s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 889.926699] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 889.926699] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61852) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 889.927114] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.739s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 889.930792] env[61852]: INFO nova.compute.claims [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 889.934028] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-729e9bfc-1b59-4b7f-b2df-9c1c8271de82 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 889.949315] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b63074c-dac5-439c-9c9d-a9da58151451 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 889.956376] env[61852]: INFO nova.scheduler.client.report [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Deleted allocations for instance 8897a654-6805-45b0-b12b-16f7981d33ad
[ 889.976185] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ad53712-2bec-4c00-8c2d-678d3b0a55d0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 889.987676] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da6eb9a9-00f7-48ad-8bc8-4c27e729b271 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 890.022021] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180777MB free_disk=138GB free_vcpus=48 pci_devices=None {{(pid=61852) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 890.022021] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 890.120955] env[61852]: DEBUG oslo_concurrency.lockutils [req-9bae8412-8bbb-4af3-8421-c7668264e309 req-09a5ad18-b157-41bb-baeb-8b727aae007a service nova] Releasing lock "refresh_cache-254919cb-e3cd-4288-8696-95e632d78a38" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 890.248166] env[61852]: DEBUG oslo_vmware.api [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293086, 'name': ReconfigVM_Task, 'duration_secs': 0.705511} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 890.248477] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Reconfigured VM instance instance-0000004f to attach disk [datastore1] b44c9cc0-3f2b-495a-87ee-f03de8dcec3c/b44c9cc0-3f2b-495a-87ee-f03de8dcec3c.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 890.249124] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-824f5571-ec0a-4aa2-8ef5-d005cb39e825 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 890.257200] env[61852]: DEBUG oslo_vmware.api [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){
[ 890.257200] env[61852]: value = "task-1293088"
[ 890.257200] env[61852]: _type = "Task"
[ 890.257200] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 890.266599] env[61852]: DEBUG oslo_vmware.api [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293088, 'name': Rename_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 890.419166] env[61852]: DEBUG oslo_vmware.api [None req-16d10ee7-ec5f-45c2-9507-f8785a83d795 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293087, 'name': PowerOffVM_Task, 'duration_secs': 0.358709} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 890.419166] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-16d10ee7-ec5f-45c2-9507-f8785a83d795 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 890.419166] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-16d10ee7-ec5f-45c2-9507-f8785a83d795 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 890.419166] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4436347d-6038-4c98-9b7c-a7565c9ea84b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 890.478366] env[61852]: DEBUG oslo_concurrency.lockutils [None req-7fcaf665-dd4a-42e0-9fc5-2e7a659108ee tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Lock "8897a654-6805-45b0-b12b-16f7981d33ad" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.298s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 890.769576] env[61852]: DEBUG oslo_vmware.api [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293088, 'name': Rename_Task, 'duration_secs': 0.311689} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 890.769800] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 890.770076] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bb80423c-1972-4e9d-985d-b42b64e14d43 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 890.776520] env[61852]: DEBUG oslo_vmware.api [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){
[ 890.776520] env[61852]: value = "task-1293090"
[ 890.776520] env[61852]: _type = "Task"
[ 890.776520] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 890.786920] env[61852]: DEBUG oslo_vmware.api [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293090, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 890.794928] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-16d10ee7-ec5f-45c2-9507-f8785a83d795 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 890.795224] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-16d10ee7-ec5f-45c2-9507-f8785a83d795 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Deleting contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 890.795517] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-16d10ee7-ec5f-45c2-9507-f8785a83d795 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Deleting the datastore file [datastore2] 12e431d3-4c23-4f4c-a619-f0b69a0e31e8 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 890.795800] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-62bb582d-673f-4e12-bfcf-dc961c2481e9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 890.803408] env[61852]: DEBUG oslo_vmware.api [None req-16d10ee7-ec5f-45c2-9507-f8785a83d795 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){
[ 890.803408] env[61852]: value = "task-1293091"
[ 890.803408] env[61852]: _type = "Task"
[ 890.803408] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 890.813537] env[61852]: DEBUG oslo_vmware.api [None req-16d10ee7-ec5f-45c2-9507-f8785a83d795 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293091, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 890.818438] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "4623565b-cd36-498c-a0e9-c3b1c6ef479b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 890.818702] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "4623565b-cd36-498c-a0e9-c3b1c6ef479b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 891.044118] env[61852]: DEBUG oslo_concurrency.lockutils [None req-eab8cd18-6fa4-43f5-b0a3-9b08eaab6bf0 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Acquiring lock "f18906e9-67b3-4537-9169-9d275e2ec4e4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 891.044118] env[61852]: DEBUG oslo_concurrency.lockutils [None req-eab8cd18-6fa4-43f5-b0a3-9b08eaab6bf0 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Lock "f18906e9-67b3-4537-9169-9d275e2ec4e4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 891.044118] env[61852]: DEBUG oslo_concurrency.lockutils [None req-eab8cd18-6fa4-43f5-b0a3-9b08eaab6bf0 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Acquiring lock "f18906e9-67b3-4537-9169-9d275e2ec4e4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 891.044118] env[61852]: DEBUG oslo_concurrency.lockutils [None req-eab8cd18-6fa4-43f5-b0a3-9b08eaab6bf0 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Lock "f18906e9-67b3-4537-9169-9d275e2ec4e4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 891.044118] env[61852]: DEBUG oslo_concurrency.lockutils [None req-eab8cd18-6fa4-43f5-b0a3-9b08eaab6bf0 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Lock "f18906e9-67b3-4537-9169-9d275e2ec4e4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 891.044907] env[61852]: INFO nova.compute.manager [None req-eab8cd18-6fa4-43f5-b0a3-9b08eaab6bf0 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Terminating instance
[ 891.047133] env[61852]: DEBUG nova.compute.manager [None req-eab8cd18-6fa4-43f5-b0a3-9b08eaab6bf0 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 891.047438] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-eab8cd18-6fa4-43f5-b0a3-9b08eaab6bf0 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 891.048383] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd2455b-4cb6-4da8-ab41-953e05b78e2a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 891.059579] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-eab8cd18-6fa4-43f5-b0a3-9b08eaab6bf0 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 891.059860] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3b826358-31e2-4485-bea1-f53998b96366 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 891.067710] env[61852]: DEBUG oslo_vmware.api [None req-eab8cd18-6fa4-43f5-b0a3-9b08eaab6bf0 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){
[ 891.067710] env[61852]: value = "task-1293092"
[ 891.067710] env[61852]: _type = "Task"
[ 891.067710] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 891.078817] env[61852]: DEBUG oslo_vmware.api [None req-eab8cd18-6fa4-43f5-b0a3-9b08eaab6bf0 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1293092, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 891.154859] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e19bf1-9adc-4433-b531-388dcccc2aad {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 891.163998] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff27c34b-d910-4893-ad9a-4234495c15ec {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 891.198992] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb84c92-55b5-480e-881f-184287dbdd59 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 891.208068] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9bdfe63-cb48-4379-91c5-d5ff82c3a3c2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 891.222514] env[61852]: DEBUG nova.compute.provider_tree [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 891.286028] env[61852]: DEBUG oslo_vmware.api [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293090, 'name': PowerOnVM_Task, 'duration_secs': 0.473132} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 891.286770] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 891.286876] env[61852]: INFO nova.compute.manager [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Took 7.37 seconds to spawn the instance on the hypervisor.
[ 891.287034] env[61852]: DEBUG nova.compute.manager [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 891.287845] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-675d49bc-3009-4f7d-b563-5f6943163abf {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 891.312272] env[61852]: DEBUG oslo_vmware.api [None req-16d10ee7-ec5f-45c2-9507-f8785a83d795 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293091, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137265} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 891.312428] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-16d10ee7-ec5f-45c2-9507-f8785a83d795 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 891.312724] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-16d10ee7-ec5f-45c2-9507-f8785a83d795 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Deleted contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 891.312808] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-16d10ee7-ec5f-45c2-9507-f8785a83d795 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 891.313100] env[61852]: INFO nova.compute.manager [None req-16d10ee7-ec5f-45c2-9507-f8785a83d795 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Took 1.42 seconds to destroy the instance on the hypervisor.
[ 891.313253] env[61852]: DEBUG oslo.service.loopingcall [None req-16d10ee7-ec5f-45c2-9507-f8785a83d795 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 891.313453] env[61852]: DEBUG nova.compute.manager [-] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 891.313548] env[61852]: DEBUG nova.network.neutron [-] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 891.320732] env[61852]: DEBUG nova.compute.manager [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 891.580978] env[61852]: DEBUG oslo_vmware.api [None req-eab8cd18-6fa4-43f5-b0a3-9b08eaab6bf0 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1293092, 'name': PowerOffVM_Task, 'duration_secs': 0.225848} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 891.581305] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-eab8cd18-6fa4-43f5-b0a3-9b08eaab6bf0 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 891.581518] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-eab8cd18-6fa4-43f5-b0a3-9b08eaab6bf0 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 891.581739] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-333ba05d-cef6-4d0e-bb30-bc569bf13d20 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 891.664194] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-eab8cd18-6fa4-43f5-b0a3-9b08eaab6bf0 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 891.664194] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-eab8cd18-6fa4-43f5-b0a3-9b08eaab6bf0 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Deleting contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 891.664194] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-eab8cd18-6fa4-43f5-b0a3-9b08eaab6bf0 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Deleting the datastore file [datastore2] f18906e9-67b3-4537-9169-9d275e2ec4e4 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 891.664571] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c2c35db9-8a9c-4cbe-8d0f-551369132d1b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 891.671970] env[61852]: DEBUG oslo_vmware.api [None req-eab8cd18-6fa4-43f5-b0a3-9b08eaab6bf0 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for the task: (returnval){
[ 891.671970] env[61852]: value = "task-1293094"
[ 891.671970] env[61852]: _type = "Task"
[ 891.671970] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 891.681337] env[61852]: DEBUG oslo_vmware.api [None req-eab8cd18-6fa4-43f5-b0a3-9b08eaab6bf0 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1293094, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 891.708940] env[61852]: DEBUG nova.compute.manager [req-ff3dd0f1-8359-4f38-b543-d38b1887beee req-a2cc2bfa-dcb1-479f-9c11-4a25124d62c8 service nova] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Received event network-vif-deleted-3377bf3e-e158-4caa-a81f-8ff46b934338 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 891.709244] env[61852]: INFO nova.compute.manager [req-ff3dd0f1-8359-4f38-b543-d38b1887beee req-a2cc2bfa-dcb1-479f-9c11-4a25124d62c8 service nova] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Neutron deleted interface 3377bf3e-e158-4caa-a81f-8ff46b934338; detaching it from the instance and deleting it from the info cache
[ 891.709459] env[61852]: DEBUG nova.network.neutron [req-ff3dd0f1-8359-4f38-b543-d38b1887beee req-a2cc2bfa-dcb1-479f-9c11-4a25124d62c8 service nova] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 891.725460] env[61852]: DEBUG nova.scheduler.client.report [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 891.815073] env[61852]: INFO nova.compute.manager [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Took 28.44 seconds to build instance.
[ 891.847368] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 892.185512] env[61852]: DEBUG oslo_vmware.api [None req-eab8cd18-6fa4-43f5-b0a3-9b08eaab6bf0 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Task: {'id': task-1293094, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136507} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 892.185778] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-eab8cd18-6fa4-43f5-b0a3-9b08eaab6bf0 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 892.185959] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-eab8cd18-6fa4-43f5-b0a3-9b08eaab6bf0 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Deleted contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 892.186151] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-eab8cd18-6fa4-43f5-b0a3-9b08eaab6bf0 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 892.186326] env[61852]: INFO nova.compute.manager [None req-eab8cd18-6fa4-43f5-b0a3-9b08eaab6bf0 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Took 1.14 seconds to destroy the instance on the hypervisor.
[ 892.186630] env[61852]: DEBUG oslo.service.loopingcall [None req-eab8cd18-6fa4-43f5-b0a3-9b08eaab6bf0 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 892.186809] env[61852]: DEBUG nova.network.neutron [-] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 892.189608] env[61852]: DEBUG nova.compute.manager [-] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 892.189608] env[61852]: DEBUG nova.network.neutron [-] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 892.212737] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9bbde135-152c-40d2-985d-0507421855ab {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 892.223831] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d863dc6-f140-48d7-a8e4-3675026586ce {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 892.235588] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.309s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 892.236151] env[61852]: DEBUG nova.compute.manager [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 892.238927] env[61852]: DEBUG oslo_concurrency.lockutils [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.167s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 892.239163] env[61852]: DEBUG nova.objects.instance [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Lazy-loading 'resources' on Instance uuid 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 892.257174] env[61852]: DEBUG nova.compute.manager [req-ff3dd0f1-8359-4f38-b543-d38b1887beee req-a2cc2bfa-dcb1-479f-9c11-4a25124d62c8 service nova] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Detach interface failed, port_id=3377bf3e-e158-4caa-a81f-8ff46b934338, reason: Instance 12e431d3-4c23-4f4c-a619-f0b69a0e31e8 could not be found. {{(pid=61852) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}}
[ 892.317075] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5463b856-2b4d-4b68-82ed-a03929521455 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "b44c9cc0-3f2b-495a-87ee-f03de8dcec3c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.961s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 892.690850] env[61852]: INFO nova.compute.manager [-] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Took 1.38 seconds to deallocate network for instance.
[ 892.743446] env[61852]: DEBUG nova.compute.utils [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}}
[ 892.744311] env[61852]: DEBUG nova.compute.manager [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 892.744529] env[61852]: DEBUG nova.network.neutron [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 892.791445] env[61852]: DEBUG nova.policy [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'be922c40dddf48c8ae436d0a244e7b6b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bdac3605118e44a69d44ab56cafe2e21', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}}
[ 892.928988] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c9ea6ff-37e9-4c42-b6da-77c2da8beb47 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 892.938706] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb7efe3-6aad-4c93-aa65-79d735fd38b9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 892.971378] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee9c59b-6e94-4da4-9aac-d6771495e7bf {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 892.979757] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-463c8543-07a2-4bb4-9f4b-8d41554f4654 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 892.994991] env[61852]: DEBUG nova.compute.provider_tree [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 893.140659] env[61852]: DEBUG oslo_concurrency.lockutils [None req-da3547a8-ad70-44f2-bf0f-d9ea3547cce9 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "b44c9cc0-3f2b-495a-87ee-f03de8dcec3c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 893.140943] env[61852]: DEBUG oslo_concurrency.lockutils [None req-da3547a8-ad70-44f2-bf0f-d9ea3547cce9 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "b44c9cc0-3f2b-495a-87ee-f03de8dcec3c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 893.141253] env[61852]: DEBUG oslo_concurrency.lockutils [None req-da3547a8-ad70-44f2-bf0f-d9ea3547cce9 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "b44c9cc0-3f2b-495a-87ee-f03de8dcec3c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 893.141476] env[61852]: DEBUG oslo_concurrency.lockutils [None req-da3547a8-ad70-44f2-bf0f-d9ea3547cce9 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "b44c9cc0-3f2b-495a-87ee-f03de8dcec3c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 893.141658] env[61852]: DEBUG oslo_concurrency.lockutils [None req-da3547a8-ad70-44f2-bf0f-d9ea3547cce9 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "b44c9cc0-3f2b-495a-87ee-f03de8dcec3c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 893.144195] env[61852]: INFO nova.compute.manager [None req-da3547a8-ad70-44f2-bf0f-d9ea3547cce9 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Terminating instance
[ 893.146667] env[61852]: DEBUG nova.network.neutron [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Successfully created port: 9dd9d717-68e4-4f79-99f4-be48ad14d8b1 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 893.148854] env[61852]: DEBUG nova.compute.manager [None req-da3547a8-ad70-44f2-bf0f-d9ea3547cce9 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 893.149068] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-da3547a8-ad70-44f2-bf0f-d9ea3547cce9 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 893.150181] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9df3c28e-0362-4470-8ae2-eabbc65d807f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 893.158122] env[61852]: DEBUG nova.network.neutron [-] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 893.159190] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-da3547a8-ad70-44f2-bf0f-d9ea3547cce9 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 893.163431] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-baaad845-5de5-4bc2-a701-4867e2298c10 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 893.171150] env[61852]: DEBUG oslo_vmware.api [None req-da3547a8-ad70-44f2-bf0f-d9ea3547cce9 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){
[ 893.171150] env[61852]: value = "task-1293095"
[ 893.171150] env[61852]: _type = "Task"
[ 893.171150] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 893.186593] env[61852]: DEBUG oslo_vmware.api [None req-da3547a8-ad70-44f2-bf0f-d9ea3547cce9 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293095, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 893.196539] env[61852]: DEBUG oslo_concurrency.lockutils [None req-16d10ee7-ec5f-45c2-9507-f8785a83d795 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 893.250310] env[61852]: DEBUG nova.compute.manager [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 893.500087] env[61852]: DEBUG nova.scheduler.client.report [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 893.666076] env[61852]: INFO nova.compute.manager [-] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Took 1.48 seconds to deallocate network for instance. [ 893.681615] env[61852]: DEBUG oslo_vmware.api [None req-da3547a8-ad70-44f2-bf0f-d9ea3547cce9 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293095, 'name': PowerOffVM_Task, 'duration_secs': 0.226058} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.681924] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-da3547a8-ad70-44f2-bf0f-d9ea3547cce9 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 893.682189] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-da3547a8-ad70-44f2-bf0f-d9ea3547cce9 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 893.682456] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-66111a39-da1d-43a9-894f-669c7ac3e06a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.744041] env[61852]: DEBUG nova.compute.manager [req-cc2feeda-5c6e-426e-8a0b-bf721bead8ec req-1b31031f-80d0-47cd-bbd4-09153faba624 service nova] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Received event network-vif-deleted-e7aa8d2a-5703-4b7d-9953-bbc015d805d5 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 893.750584] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-da3547a8-ad70-44f2-bf0f-d9ea3547cce9 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 893.750810] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-da3547a8-ad70-44f2-bf0f-d9ea3547cce9 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 893.751024] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-da3547a8-ad70-44f2-bf0f-d9ea3547cce9 tempest-ServersTestJSON-848945246 
tempest-ServersTestJSON-848945246-project-member] Deleting the datastore file [datastore1] b44c9cc0-3f2b-495a-87ee-f03de8dcec3c {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 893.751304] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f45ada6b-3905-437c-a6d4-d11c6e314085 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.760447] env[61852]: DEBUG oslo_vmware.api [None req-da3547a8-ad70-44f2-bf0f-d9ea3547cce9 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 893.760447] env[61852]: value = "task-1293097" [ 893.760447] env[61852]: _type = "Task" [ 893.760447] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.771409] env[61852]: DEBUG oslo_vmware.api [None req-da3547a8-ad70-44f2-bf0f-d9ea3547cce9 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293097, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.003277] env[61852]: DEBUG oslo_concurrency.lockutils [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.764s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.005696] env[61852]: DEBUG oslo_concurrency.lockutils [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.427s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.007461] env[61852]: INFO nova.compute.claims [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 894.027352] env[61852]: INFO nova.scheduler.client.report [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Deleted allocations for instance 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb [ 894.172083] env[61852]: DEBUG oslo_concurrency.lockutils [None req-eab8cd18-6fa4-43f5-b0a3-9b08eaab6bf0 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.262786] env[61852]: DEBUG nova.compute.manager [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 894.274718] env[61852]: DEBUG oslo_vmware.api [None req-da3547a8-ad70-44f2-bf0f-d9ea3547cce9 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293097, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170715} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.274985] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-da3547a8-ad70-44f2-bf0f-d9ea3547cce9 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 894.275205] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-da3547a8-ad70-44f2-bf0f-d9ea3547cce9 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 894.275465] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-da3547a8-ad70-44f2-bf0f-d9ea3547cce9 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 894.275736] env[61852]: INFO nova.compute.manager [None req-da3547a8-ad70-44f2-bf0f-d9ea3547cce9 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Took 1.13 seconds to destroy the instance on the hypervisor. [ 894.276067] env[61852]: DEBUG oslo.service.loopingcall [None req-da3547a8-ad70-44f2-bf0f-d9ea3547cce9 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 894.276322] env[61852]: DEBUG nova.compute.manager [-] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 894.276448] env[61852]: DEBUG nova.network.neutron [-] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 894.289254] env[61852]: DEBUG nova.virt.hardware [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 894.289543] env[61852]: DEBUG nova.virt.hardware [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 894.289743] env[61852]: DEBUG nova.virt.hardware [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 894.289965] env[61852]: DEBUG nova.virt.hardware [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 894.290178] env[61852]: DEBUG nova.virt.hardware [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 894.290377] env[61852]: DEBUG nova.virt.hardware [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 894.290647] env[61852]: DEBUG nova.virt.hardware [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 894.290846] env[61852]: DEBUG nova.virt.hardware [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 894.291081] env[61852]: DEBUG nova.virt.hardware [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 894.291293] env[61852]: DEBUG nova.virt.hardware [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 894.291493] env[61852]: DEBUG nova.virt.hardware [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 894.292319] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cb7d29c-d0e1-4e7a-8e12-81fee9becd9f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.300753] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f146960d-0083-4a00-a8d3-16ca8059c7dd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.534892] env[61852]: DEBUG oslo_concurrency.lockutils [None req-45cc0253-dbe7-4bfe-9bb3-f0d547440d36 tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Lock "23ff3009-7b13-4d5e-93ed-ca1c3e9127bb" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 15.345s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.569191] env[61852]: DEBUG nova.compute.manager [req-0e9b750f-0bb8-46c7-aa88-d59d7e8bda77 req-123439e3-7d37-4bd5-9643-e1335e8bb77b service nova] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Received event network-vif-deleted-5dd12202-d332-4603-a2aa-3406fc1413b6 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 894.569191] env[61852]: INFO nova.compute.manager [req-0e9b750f-0bb8-46c7-aa88-d59d7e8bda77 req-123439e3-7d37-4bd5-9643-e1335e8bb77b service nova] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Neutron deleted interface 5dd12202-d332-4603-a2aa-3406fc1413b6; detaching it from the instance and deleting it from the info cache [ 894.569191] env[61852]: DEBUG nova.network.neutron [req-0e9b750f-0bb8-46c7-aa88-d59d7e8bda77 req-123439e3-7d37-4bd5-9643-e1335e8bb77b service nova] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.023488] env[61852]: DEBUG nova.network.neutron [-] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Updating
instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.069893] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-211969f2-6411-45f4-bdab-6f13ad75ddd2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.080370] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e7d838-0991-43c2-8b8d-6b9087b2ab5c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.114898] env[61852]: DEBUG nova.compute.manager [req-0e9b750f-0bb8-46c7-aa88-d59d7e8bda77 req-123439e3-7d37-4bd5-9643-e1335e8bb77b service nova] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Detach interface failed, port_id=5dd12202-d332-4603-a2aa-3406fc1413b6, reason: Instance b44c9cc0-3f2b-495a-87ee-f03de8dcec3c could not be found. {{(pid=61852) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 895.136229] env[61852]: DEBUG nova.network.neutron [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Successfully updated port: 9dd9d717-68e4-4f79-99f4-be48ad14d8b1 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 895.165205] env[61852]: DEBUG oslo_concurrency.lockutils [None req-60f2f3e4-66a7-49a9-8121-cff3dc7481cb tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquiring lock "8d733f93-7636-447b-a5d5-53c16c30061f" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.165456] env[61852]: DEBUG oslo_concurrency.lockutils [None req-60f2f3e4-66a7-49a9-8121-cff3dc7481cb tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Lock "8d733f93-7636-447b-a5d5-53c16c30061f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 895.165718] env[61852]: DEBUG oslo_concurrency.lockutils [None req-60f2f3e4-66a7-49a9-8121-cff3dc7481cb tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquiring lock "8d733f93-7636-447b-a5d5-53c16c30061f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.165921] env[61852]: DEBUG oslo_concurrency.lockutils [None req-60f2f3e4-66a7-49a9-8121-cff3dc7481cb tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Lock "8d733f93-7636-447b-a5d5-53c16c30061f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 895.166108] env[61852]: DEBUG oslo_concurrency.lockutils [None req-60f2f3e4-66a7-49a9-8121-cff3dc7481cb tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Lock
"8d733f93-7636-447b-a5d5-53c16c30061f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.168071] env[61852]: INFO nova.compute.manager [None req-60f2f3e4-66a7-49a9-8121-cff3dc7481cb tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Terminating instance [ 895.169738] env[61852]: DEBUG nova.compute.manager [None req-60f2f3e4-66a7-49a9-8121-cff3dc7481cb tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 895.169935] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-60f2f3e4-66a7-49a9-8121-cff3dc7481cb tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 895.170770] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93a164c6-cb60-4c73-9977-72c79ca2bfff {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.179710] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-60f2f3e4-66a7-49a9-8121-cff3dc7481cb tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 895.182027] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7777e1c8-dd19-4650-a5b8-27a48c4c9389 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.189046] env[61852]: DEBUG oslo_vmware.api [None req-60f2f3e4-66a7-49a9-8121-cff3dc7481cb tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 895.189046] env[61852]: value = "task-1293098" [ 895.189046] env[61852]: _type = "Task" [ 895.189046] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.198167] env[61852]: DEBUG oslo_vmware.api [None req-60f2f3e4-66a7-49a9-8121-cff3dc7481cb tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293098, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.233035] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8701b505-41b5-4b0c-89d2-77862e816717 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.240885] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69b6a9b0-bcf0-4141-9d34-efb6b9341989 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.271187] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-338fd6c6-3a49-4723-a944-9b1b24dcae2f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.279203] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1d602ae-6cfa-46fd-b82e-3447177af055 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.292979] env[61852]: DEBUG nova.compute.provider_tree [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 895.529176] env[61852]: INFO nova.compute.manager [-] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Took 1.25 seconds to deallocate network for instance. [ 895.643114] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "refresh_cache-4fb68588-21a8-4004-9bbc-aa1655624bcb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.643114] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquired lock "refresh_cache-4fb68588-21a8-4004-9bbc-aa1655624bcb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.643114] env[61852]: DEBUG nova.network.neutron [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 895.701944] env[61852]: DEBUG oslo_vmware.api [None req-60f2f3e4-66a7-49a9-8121-cff3dc7481cb tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293098, 'name': PowerOffVM_Task, 'duration_secs': 0.234622} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.705842] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-60f2f3e4-66a7-49a9-8121-cff3dc7481cb tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 895.705945] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-60f2f3e4-66a7-49a9-8121-cff3dc7481cb tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 895.706335] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5673897e-77cd-4dc2-940a-0d8830e77ccc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.773498] env[61852]: DEBUG nova.compute.manager [req-76959835-937f-4e6a-b331-fc32db169e41 req-6f5c3dde-87f2-45b8-85c9-ce895ea003d9 service nova] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Received event network-vif-plugged-9dd9d717-68e4-4f79-99f4-be48ad14d8b1 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 895.773752] env[61852]: DEBUG oslo_concurrency.lockutils [req-76959835-937f-4e6a-b331-fc32db169e41 req-6f5c3dde-87f2-45b8-85c9-ce895ea003d9 service nova] Acquiring lock "4fb68588-21a8-4004-9bbc-aa1655624bcb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.774057] env[61852]: DEBUG oslo_concurrency.lockutils [req-76959835-937f-4e6a-b331-fc32db169e41 req-6f5c3dde-87f2-45b8-85c9-ce895ea003d9 service nova] Lock "4fb68588-21a8-4004-9bbc-aa1655624bcb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 895.774197] env[61852]: DEBUG oslo_concurrency.lockutils [req-76959835-937f-4e6a-b331-fc32db169e41 req-6f5c3dde-87f2-45b8-85c9-ce895ea003d9 service nova] Lock "4fb68588-21a8-4004-9bbc-aa1655624bcb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.774393] env[61852]: DEBUG nova.compute.manager [req-76959835-937f-4e6a-b331-fc32db169e41 req-6f5c3dde-87f2-45b8-85c9-ce895ea003d9 service nova] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] No waiting events found dispatching network-vif-plugged-9dd9d717-68e4-4f79-99f4-be48ad14d8b1 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 895.774594] env[61852]: WARNING nova.compute.manager [req-76959835-937f-4e6a-b331-fc32db169e41 req-6f5c3dde-87f2-45b8-85c9-ce895ea003d9 service nova] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Received unexpected event network-vif-plugged-9dd9d717-68e4-4f79-99f4-be48ad14d8b1 for instance with vm_state building and task_state spawning.
[ 895.774787] env[61852]: DEBUG nova.compute.manager [req-76959835-937f-4e6a-b331-fc32db169e41 req-6f5c3dde-87f2-45b8-85c9-ce895ea003d9 service nova] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Received event network-changed-9dd9d717-68e4-4f79-99f4-be48ad14d8b1 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 895.774954] env[61852]: DEBUG nova.compute.manager [req-76959835-937f-4e6a-b331-fc32db169e41 req-6f5c3dde-87f2-45b8-85c9-ce895ea003d9 service nova] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Refreshing instance network info cache due to event network-changed-9dd9d717-68e4-4f79-99f4-be48ad14d8b1. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 895.775151] env[61852]: DEBUG oslo_concurrency.lockutils [req-76959835-937f-4e6a-b331-fc32db169e41 req-6f5c3dde-87f2-45b8-85c9-ce895ea003d9 service nova] Acquiring lock "refresh_cache-4fb68588-21a8-4004-9bbc-aa1655624bcb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.797438] env[61852]: DEBUG nova.scheduler.client.report [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 895.800682] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-60f2f3e4-66a7-49a9-8121-cff3dc7481cb tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 895.801517] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-60f2f3e4-66a7-49a9-8121-cff3dc7481cb tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 895.801517] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-60f2f3e4-66a7-49a9-8121-cff3dc7481cb tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Deleting the datastore file [datastore1] 8d733f93-7636-447b-a5d5-53c16c30061f {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 895.801517] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-64fd73da-e298-4abd-81f1-16d3bc0a3d07 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.810915] env[61852]: DEBUG oslo_vmware.api [None req-60f2f3e4-66a7-49a9-8121-cff3dc7481cb tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for the task: (returnval){ [ 895.810915] env[61852]: value = "task-1293100" [ 895.810915] env[61852]: _type = "Task" [ 895.810915] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.823743] env[61852]: DEBUG oslo_vmware.api [None req-60f2f3e4-66a7-49a9-8121-cff3dc7481cb tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293100, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.036839] env[61852]: DEBUG oslo_concurrency.lockutils [None req-da3547a8-ad70-44f2-bf0f-d9ea3547cce9 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.187398] env[61852]: DEBUG nova.network.neutron [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 896.306631] env[61852]: DEBUG oslo_concurrency.lockutils [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.301s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.307201] env[61852]: DEBUG nova.compute.manager [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 896.311486] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 11.360s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.311486] env[61852]: DEBUG nova.objects.instance [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61852) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 896.324073] env[61852]: DEBUG oslo_vmware.api [None req-60f2f3e4-66a7-49a9-8121-cff3dc7481cb tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Task: {'id': task-1293100, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176606} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.324403] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-60f2f3e4-66a7-49a9-8121-cff3dc7481cb tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 896.324640] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-60f2f3e4-66a7-49a9-8121-cff3dc7481cb tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 896.324845] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-60f2f3e4-66a7-49a9-8121-cff3dc7481cb tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 896.325044] env[61852]: INFO nova.compute.manager [None req-60f2f3e4-66a7-49a9-8121-cff3dc7481cb tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Took 1.16 seconds to destroy the instance on the hypervisor. [ 896.325306] env[61852]: DEBUG oslo.service.loopingcall [None req-60f2f3e4-66a7-49a9-8121-cff3dc7481cb tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 896.325509] env[61852]: DEBUG nova.compute.manager [-] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 896.325606] env[61852]: DEBUG nova.network.neutron [-] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 896.356058] env[61852]: DEBUG nova.network.neutron [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Updating instance_info_cache with network_info: [{"id": "9dd9d717-68e4-4f79-99f4-be48ad14d8b1", "address": "fa:16:3e:df:ca:b9", "network": {"id": "240e5d63-b796-4cef-9d1f-5d8f8868dea4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1472329620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdac3605118e44a69d44ab56cafe2e21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname":
"tap9dd9d717-68", "ovs_interfaceid": "9dd9d717-68e4-4f79-99f4-be48ad14d8b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.649427] env[61852]: DEBUG nova.compute.manager [req-d90f18f0-b60a-4603-962c-b334655e8c34 req-88a34cb0-76e9-49fa-a039-0e7f7514bfba service nova] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Received event network-vif-deleted-7791e47c-6084-49c7-b1c2-b28459f8f408 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 896.649645] env[61852]: INFO nova.compute.manager [req-d90f18f0-b60a-4603-962c-b334655e8c34 req-88a34cb0-76e9-49fa-a039-0e7f7514bfba service nova] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Neutron deleted interface 7791e47c-6084-49c7-b1c2-b28459f8f408; detaching it from the instance and deleting it from the info cache [ 896.649856] env[61852]: DEBUG nova.network.neutron [req-d90f18f0-b60a-4603-962c-b334655e8c34 req-88a34cb0-76e9-49fa-a039-0e7f7514bfba service nova] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.816158] env[61852]: DEBUG nova.compute.utils [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 896.821916] env[61852]: DEBUG nova.compute.manager [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 896.822097] env[61852]: DEBUG nova.network.neutron [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 896.859654] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Releasing lock "refresh_cache-4fb68588-21a8-4004-9bbc-aa1655624bcb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.860080] env[61852]: DEBUG nova.compute.manager [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Instance network_info: |[{"id": "9dd9d717-68e4-4f79-99f4-be48ad14d8b1", "address": "fa:16:3e:df:ca:b9", "network": {"id": "240e5d63-b796-4cef-9d1f-5d8f8868dea4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1472329620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdac3605118e44a69d44ab56cafe2e21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9dd9d717-68", "ovs_interfaceid": "9dd9d717-68e4-4f79-99f4-be48ad14d8b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 896.860404] env[61852]: DEBUG oslo_concurrency.lockutils [req-76959835-937f-4e6a-b331-fc32db169e41 req-6f5c3dde-87f2-45b8-85c9-ce895ea003d9 service nova] Acquired lock "refresh_cache-4fb68588-21a8-4004-9bbc-aa1655624bcb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.860590] env[61852]: DEBUG nova.network.neutron [req-76959835-937f-4e6a-b331-fc32db169e41 req-6f5c3dde-87f2-45b8-85c9-ce895ea003d9 service nova] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Refreshing network info cache for port 9dd9d717-68e4-4f79-99f4-be48ad14d8b1 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 896.861797] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:ca:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cd5d325-3053-407e-a4ee-f627e82a23f9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'9dd9d717-68e4-4f79-99f4-be48ad14d8b1', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 896.870615] env[61852]: DEBUG oslo.service.loopingcall [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 896.871627] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 896.871872] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-312b6278-7999-4920-ae4c-7e5a565e207b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.889417] env[61852]: DEBUG nova.policy [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5db98c1126cc41b5930b2e5fa823c330', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '783bc6968c91488293479f10b8dc92c1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 896.899719] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 896.899719] env[61852]: value = "task-1293101" [ 896.899719] env[61852]: _type = "Task" [ 896.899719] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.908616] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293101, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.089827] env[61852]: DEBUG nova.network.neutron [-] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.152391] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-71bb66ea-c4f3-487d-b849-9a008941d861 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.162902] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3048293-1f0c-472a-8553-55ab59059f25 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.174372] env[61852]: DEBUG nova.network.neutron [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Successfully created port: ad6990c8-dfec-404b-9e08-011ac672c222 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 897.193369] env[61852]: DEBUG nova.compute.manager [req-d90f18f0-b60a-4603-962c-b334655e8c34 req-88a34cb0-76e9-49fa-a039-0e7f7514bfba service nova] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Detach interface failed, port_id=7791e47c-6084-49c7-b1c2-b28459f8f408, reason: Instance 8d733f93-7636-447b-a5d5-53c16c30061f could not be found. {{(pid=61852) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 897.322755] env[61852]: DEBUG nova.compute.manager [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 897.327163] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b86ac978-2c7b-4f25-bc84-77a24168fbe8 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.329099] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.385s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.331318] env[61852]: INFO nova.compute.claims [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 897.410505] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293101, 'name': CreateVM_Task, 'duration_secs': 0.341737} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.410686] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 897.411402] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.411577] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.411907] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 897.412185] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc28d2b3-06bb-48fa-bb7d-21aba5455975 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.418168] env[61852]: DEBUG oslo_concurrency.lockutils [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Acquiring lock "e97448d7-0162-44bf-95d1-93bdcbcaec25" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.418386] env[61852]: DEBUG oslo_concurrency.lockutils [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Lock "e97448d7-0162-44bf-95d1-93bdcbcaec25" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.421589] env[61852]: DEBUG oslo_vmware.api [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 897.421589] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52846154-3612-1fe1-884b-d4c13b13f373" [ 897.421589] env[61852]: _type = "Task" [ 897.421589] env[61852]: } to complete.
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.432580] env[61852]: DEBUG oslo_vmware.api [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52846154-3612-1fe1-884b-d4c13b13f373, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.591582] env[61852]: INFO nova.compute.manager [-] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Took 1.27 seconds to deallocate network for instance. [ 897.608208] env[61852]: DEBUG nova.network.neutron [req-76959835-937f-4e6a-b331-fc32db169e41 req-6f5c3dde-87f2-45b8-85c9-ce895ea003d9 service nova] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Updated VIF entry in instance network info cache for port 9dd9d717-68e4-4f79-99f4-be48ad14d8b1. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 897.608208] env[61852]: DEBUG nova.network.neutron [req-76959835-937f-4e6a-b331-fc32db169e41 req-6f5c3dde-87f2-45b8-85c9-ce895ea003d9 service nova] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Updating instance_info_cache with network_info: [{"id": "9dd9d717-68e4-4f79-99f4-be48ad14d8b1", "address": "fa:16:3e:df:ca:b9", "network": {"id": "240e5d63-b796-4cef-9d1f-5d8f8868dea4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1472329620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdac3605118e44a69d44ab56cafe2e21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9dd9d717-68", "ovs_interfaceid": "9dd9d717-68e4-4f79-99f4-be48ad14d8b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.921275] env[61852]: DEBUG nova.compute.manager [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 897.933297] env[61852]: DEBUG oslo_vmware.api [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52846154-3612-1fe1-884b-d4c13b13f373, 'name': SearchDatastore_Task, 'duration_secs': 0.011281} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.933594] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 897.933834] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 897.934080] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.935666] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.935666] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 897.935666] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-677ea44c-4e4e-4dae-a5c6-003de67428b3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.944343] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 897.944479] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 897.945259] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dba8dfa8-1be5-4542-b695-57b5fb20e5b1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.953496] env[61852]: DEBUG oslo_vmware.api [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 897.953496] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52342849-fe12-796f-d519-e00aa47f366d" [ 897.953496] env[61852]: _type = "Task" [ 897.953496] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.961060] env[61852]: DEBUG oslo_vmware.api [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52342849-fe12-796f-d519-e00aa47f366d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.101113] env[61852]: DEBUG oslo_concurrency.lockutils [None req-60f2f3e4-66a7-49a9-8121-cff3dc7481cb tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 898.111038] env[61852]: DEBUG oslo_concurrency.lockutils [req-76959835-937f-4e6a-b331-fc32db169e41 req-6f5c3dde-87f2-45b8-85c9-ce895ea003d9 service nova] Releasing lock "refresh_cache-4fb68588-21a8-4004-9bbc-aa1655624bcb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.335806] env[61852]: DEBUG nova.compute.manager [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 898.368463] env[61852]: DEBUG nova.virt.hardware [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='1bf7608631a9945df79ca0be66647a60',container_format='bare',created_at=2024-10-15T17:26:05Z,direct_url=,disk_format='vmdk',id=ca674796-50b0-4a64-90f2-d0e6a238a167,min_disk=1,min_ram=0,name='tempest-test-snap-151487598',owner='783bc6968c91488293479f10b8dc92c1',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2024-10-15T17:26:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 898.368843] env[61852]: DEBUG nova.virt.hardware [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 898.370219] env[61852]: DEBUG nova.virt.hardware [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 898.371658] env[61852]: DEBUG nova.virt.hardware [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 898.371910] env[61852]: DEBUG nova.virt.hardware [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 898.372161] env[61852]: DEBUG nova.virt.hardware [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 898.372456] env[61852]: DEBUG nova.virt.hardware [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 898.372698] env[61852]: DEBUG nova.virt.hardware [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 898.372947] env[61852]: DEBUG nova.virt.hardware [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Got 1 possible topologies 
{{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 898.373200] env[61852]: DEBUG nova.virt.hardware [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 898.373451] env[61852]: DEBUG nova.virt.hardware [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 898.374374] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6360de61-4c92-4661-b62b-57c33f477d3d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.387877] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41b3b9a6-606d-49c6-af8b-852612a49b1a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.442652] env[61852]: DEBUG oslo_concurrency.lockutils [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 898.463882] env[61852]: DEBUG oslo_vmware.api [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52342849-fe12-796f-d519-e00aa47f366d, 'name': SearchDatastore_Task, 'duration_secs': 0.009153} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.464374] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a60f255-454e-42b9-81cf-d4165743d716 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.470775] env[61852]: DEBUG oslo_vmware.api [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 898.470775] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52892b0a-d001-1dd7-7905-4f35ed551896" [ 898.470775] env[61852]: _type = "Task" [ 898.470775] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.482936] env[61852]: DEBUG oslo_vmware.api [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52892b0a-d001-1dd7-7905-4f35ed551896, 'name': SearchDatastore_Task, 'duration_secs': 0.009731} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.483141] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.483578] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 4fb68588-21a8-4004-9bbc-aa1655624bcb/4fb68588-21a8-4004-9bbc-aa1655624bcb.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 898.483676] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-00a66702-a56a-41d3-8fd3-fb367a97cace {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.490339] env[61852]: DEBUG oslo_vmware.api [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 898.490339] env[61852]: value = "task-1293102" [ 898.490339] env[61852]: _type = "Task" [ 898.490339] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.499191] env[61852]: DEBUG oslo_vmware.api [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293102, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.542982] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de8b3ad9-4f60-4b0c-9384-ee41e596a1fd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.551174] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e92f7e-368e-42b9-9909-cb285929644d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.583377] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a05909-a5ab-4ea5-a3b8-873e23325fe0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.591430] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-164e5b98-e689-4077-b46c-265d3f312016 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.605129] env[61852]: DEBUG nova.compute.provider_tree [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 898.673061] env[61852]: DEBUG nova.compute.manager [req-7aa695a4-60f7-409d-9734-58457b65934b req-eae6ad04-14f3-42a3-a792-d7f8773ec3f6 service nova] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Received event network-vif-plugged-ad6990c8-dfec-404b-9e08-011ac672c222 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 898.673305] env[61852]: DEBUG oslo_concurrency.lockutils [req-7aa695a4-60f7-409d-9734-58457b65934b req-eae6ad04-14f3-42a3-a792-d7f8773ec3f6 service nova] Acquiring lock "6cb1968c-b951-4a83-a036-ba50b735133c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 898.673527] env[61852]: DEBUG oslo_concurrency.lockutils [req-7aa695a4-60f7-409d-9734-58457b65934b req-eae6ad04-14f3-42a3-a792-d7f8773ec3f6 service nova] Lock "6cb1968c-b951-4a83-a036-ba50b735133c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 898.673702] env[61852]: DEBUG oslo_concurrency.lockutils [req-7aa695a4-60f7-409d-9734-58457b65934b req-eae6ad04-14f3-42a3-a792-d7f8773ec3f6 service nova] Lock "6cb1968c-b951-4a83-a036-ba50b735133c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 898.673927] env[61852]: DEBUG 
nova.compute.manager [req-7aa695a4-60f7-409d-9734-58457b65934b req-eae6ad04-14f3-42a3-a792-d7f8773ec3f6 service nova] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] No waiting events found dispatching network-vif-plugged-ad6990c8-dfec-404b-9e08-011ac672c222 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 898.674219] env[61852]: WARNING nova.compute.manager [req-7aa695a4-60f7-409d-9734-58457b65934b req-eae6ad04-14f3-42a3-a792-d7f8773ec3f6 service nova] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Received unexpected event network-vif-plugged-ad6990c8-dfec-404b-9e08-011ac672c222 for instance with vm_state building and task_state spawning. [ 898.687507] env[61852]: DEBUG nova.network.neutron [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Successfully updated port: ad6990c8-dfec-404b-9e08-011ac672c222 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 899.000455] env[61852]: DEBUG oslo_vmware.api [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293102, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.471712} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.000718] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 4fb68588-21a8-4004-9bbc-aa1655624bcb/4fb68588-21a8-4004-9bbc-aa1655624bcb.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 899.000946] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 899.001257] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ecde63e7-0629-4458-ace4-f71f0feb1a7a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.008572] env[61852]: DEBUG oslo_vmware.api [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 899.008572] env[61852]: value = "task-1293103" [ 899.008572] env[61852]: _type = "Task" [ 899.008572] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.016659] env[61852]: DEBUG oslo_vmware.api [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293103, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.125395] env[61852]: ERROR nova.scheduler.client.report [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [req-7f9ae0e7-e6ce-4066-9604-c5091da73620] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f818062c-7b17-4bd0-94af-192a674543c3. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7f9ae0e7-e6ce-4066-9604-c5091da73620"}]} [ 899.140461] env[61852]: DEBUG nova.scheduler.client.report [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Refreshing inventories for resource provider f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 899.152334] env[61852]: DEBUG nova.scheduler.client.report [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Updating ProviderTree inventory for provider f818062c-7b17-4bd0-94af-192a674543c3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 899.152538] env[61852]: DEBUG nova.compute.provider_tree [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 899.161930] env[61852]: DEBUG nova.scheduler.client.report [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Refreshing aggregate associations for resource provider f818062c-7b17-4bd0-94af-192a674543c3, aggregates: None {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 899.177627] env[61852]: DEBUG nova.scheduler.client.report [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 
tempest-ServerDiskConfigTestJSON-1665070456-project-member] Refreshing trait associations for resource provider f818062c-7b17-4bd0-94af-192a674543c3, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 899.190381] env[61852]: DEBUG oslo_concurrency.lockutils [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "refresh_cache-6cb1968c-b951-4a83-a036-ba50b735133c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.190560] env[61852]: DEBUG oslo_concurrency.lockutils [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquired lock "refresh_cache-6cb1968c-b951-4a83-a036-ba50b735133c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.190767] env[61852]: DEBUG nova.network.neutron [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 899.321722] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-375f089f-705c-4e53-aa2d-5f214a6a1f38 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.329787] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f103833-95de-40e1-8618-a5bafbfc473c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.362103] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7fc4eba-b14a-4195-9743-a020a42b2e2c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.369554] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0bad2cd-3435-4a15-b84c-d065980d9432 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.382894] env[61852]: DEBUG nova.compute.provider_tree [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 899.520046] env[61852]: DEBUG oslo_vmware.api [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293103, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065758} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.520046] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 899.520437] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78b99b48-8dd6-40a9-aeab-f6afb21d835a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.542509] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] 4fb68588-21a8-4004-9bbc-aa1655624bcb/4fb68588-21a8-4004-9bbc-aa1655624bcb.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 899.542777] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48fe7b98-fe5e-4f94-9342-c035dfbe3fbd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.563326] env[61852]: DEBUG oslo_vmware.api [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 899.563326] env[61852]: value = "task-1293104" [ 899.563326] env[61852]: _type = "Task" [ 899.563326] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.571034] env[61852]: DEBUG oslo_vmware.api [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293104, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.725117] env[61852]: DEBUG nova.network.neutron [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 899.854471] env[61852]: DEBUG nova.network.neutron [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Updating instance_info_cache with network_info: [{"id": "ad6990c8-dfec-404b-9e08-011ac672c222", "address": "fa:16:3e:02:d5:ea", "network": {"id": "5c538b43-cd66-41dd-b7f8-8d7f49060f2f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1279580713-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "783bc6968c91488293479f10b8dc92c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3e0aae3-33d1-403b-bfaf-306f77a1422e", "external-id": "nsx-vlan-transportzone-211", "segmentation_id": 211, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad6990c8-df", "ovs_interfaceid": "ad6990c8-dfec-404b-9e08-011ac672c222", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.885768] env[61852]: DEBUG nova.scheduler.client.report [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 900.074851] env[61852]: DEBUG oslo_vmware.api [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293104, 'name': ReconfigVM_Task, 'duration_secs': 0.313367} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.075173] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Reconfigured VM instance instance-00000050 to attach disk [datastore1] 4fb68588-21a8-4004-9bbc-aa1655624bcb/4fb68588-21a8-4004-9bbc-aa1655624bcb.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 900.075783] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-48feb12c-9536-4465-9864-6c6d489b0030 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.083544] env[61852]: DEBUG oslo_vmware.api [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 900.083544] env[61852]: value = "task-1293105" [ 900.083544] env[61852]: _type = "Task" [ 900.083544] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.092252] env[61852]: DEBUG oslo_vmware.api [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293105, 'name': Rename_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.358765] env[61852]: DEBUG oslo_concurrency.lockutils [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Releasing lock "refresh_cache-6cb1968c-b951-4a83-a036-ba50b735133c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.358765] env[61852]: DEBUG nova.compute.manager [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Instance network_info: |[{"id": "ad6990c8-dfec-404b-9e08-011ac672c222", "address": "fa:16:3e:02:d5:ea", "network": {"id": "5c538b43-cd66-41dd-b7f8-8d7f49060f2f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1279580713-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "783bc6968c91488293479f10b8dc92c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3e0aae3-33d1-403b-bfaf-306f77a1422e", "external-id": "nsx-vlan-transportzone-211", "segmentation_id": 211, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad6990c8-df", "ovs_interfaceid": "ad6990c8-dfec-404b-9e08-011ac672c222", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 900.359333] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:d5:ea', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c3e0aae3-33d1-403b-bfaf-306f77a1422e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ad6990c8-dfec-404b-9e08-011ac672c222', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 900.366884] env[61852]: DEBUG oslo.service.loopingcall [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 900.367108] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 900.367338] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4584626f-06ff-4663-9c7d-764072a0ce1a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.387092] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 900.387092] env[61852]: value = "task-1293106" [ 900.387092] env[61852]: _type = "Task" [ 900.387092] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.390548] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.061s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.391048] env[61852]: DEBUG nova.compute.manager [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 900.393576] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.005s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.393796] env[61852]: DEBUG nova.objects.instance [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Lazy-loading 'resources' on Instance uuid eae1ad1f-f213-4227-93aa-b0ccf660e638 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 900.401196] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293106, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.594337] env[61852]: DEBUG oslo_vmware.api [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293105, 'name': Rename_Task, 'duration_secs': 0.139374} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.594648] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 900.594919] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b6c59609-6c77-4905-87fc-b68ae7c63d52 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.601188] env[61852]: DEBUG oslo_vmware.api [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 900.601188] env[61852]: value = "task-1293107" [ 900.601188] env[61852]: _type = "Task" [ 900.601188] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.610420] env[61852]: DEBUG oslo_vmware.api [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293107, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.701024] env[61852]: DEBUG nova.compute.manager [req-79e482f9-5c72-42ed-940e-e30343884a30 req-33965fae-fc27-4706-aed1-5335fa08e636 service nova] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Received event network-changed-ad6990c8-dfec-404b-9e08-011ac672c222 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 900.701279] env[61852]: DEBUG nova.compute.manager [req-79e482f9-5c72-42ed-940e-e30343884a30 req-33965fae-fc27-4706-aed1-5335fa08e636 service nova] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Refreshing instance network info cache due to event network-changed-ad6990c8-dfec-404b-9e08-011ac672c222. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 900.701515] env[61852]: DEBUG oslo_concurrency.lockutils [req-79e482f9-5c72-42ed-940e-e30343884a30 req-33965fae-fc27-4706-aed1-5335fa08e636 service nova] Acquiring lock "refresh_cache-6cb1968c-b951-4a83-a036-ba50b735133c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.701663] env[61852]: DEBUG oslo_concurrency.lockutils [req-79e482f9-5c72-42ed-940e-e30343884a30 req-33965fae-fc27-4706-aed1-5335fa08e636 service nova] Acquired lock "refresh_cache-6cb1968c-b951-4a83-a036-ba50b735133c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.701829] env[61852]: DEBUG nova.network.neutron [req-79e482f9-5c72-42ed-940e-e30343884a30 req-33965fae-fc27-4706-aed1-5335fa08e636 service nova] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Refreshing network info cache for port ad6990c8-dfec-404b-9e08-011ac672c222 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 900.900135] env[61852]: DEBUG nova.compute.utils [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 900.903769] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293106, 'name': CreateVM_Task, 'duration_secs': 0.342908} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.904467] env[61852]: DEBUG nova.compute.manager [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 900.904695] env[61852]: DEBUG nova.network.neutron [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 900.906394] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 900.907047] env[61852]: DEBUG oslo_concurrency.lockutils [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ca674796-50b0-4a64-90f2-d0e6a238a167" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.907206] env[61852]: DEBUG oslo_concurrency.lockutils [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ca674796-50b0-4a64-90f2-d0e6a238a167" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.907562] env[61852]: DEBUG oslo_concurrency.lockutils [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ca674796-50b0-4a64-90f2-d0e6a238a167" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 900.908130] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aee09f87-c16d-4c5e-a909-2ec3c1997466 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.915199] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 900.915199] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52a258d1-ed41-a882-afe4-d33e9b5c21f1" [ 900.915199] env[61852]: _type = "Task" [ 900.915199] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.926863] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52a258d1-ed41-a882-afe4-d33e9b5c21f1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.950254] env[61852]: DEBUG nova.policy [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eeca45e07f5b41e38b9ab8ac31bad06c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14a017ea2b084ae0ad2994dda7809c7c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 901.070846] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9fc2585-f544-4c5c-a97a-20e96515af0e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.079576] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-630566c3-81d4-476e-8ba5-962a58e46842 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.116983] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9831853-ab76-4a8d-a676-0d0f3ecb20a8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.124805] env[61852]: DEBUG oslo_vmware.api [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293107, 'name': PowerOnVM_Task, 'duration_secs': 0.490725} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.126822] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 901.127074] env[61852]: INFO nova.compute.manager [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Took 6.86 seconds to spawn the instance on the hypervisor. 
[ 901.127277] env[61852]: DEBUG nova.compute.manager [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 901.128041] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57247420-916e-46a9-a27e-37cfdd6c2c98 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.131198] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c2333c7-20e5-430a-a7c9-726e04b2f111 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.148871] env[61852]: DEBUG nova.compute.provider_tree [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 901.211432] env[61852]: DEBUG nova.network.neutron [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Successfully created port: 61e94b93-d030-4c70-8ffc-ce81cbf29d01 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 901.405646] env[61852]: DEBUG nova.compute.manager [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 901.430608] env[61852]: DEBUG oslo_concurrency.lockutils [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ca674796-50b0-4a64-90f2-d0e6a238a167" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.430984] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Processing image ca674796-50b0-4a64-90f2-d0e6a238a167 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 901.431136] env[61852]: DEBUG oslo_concurrency.lockutils [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ca674796-50b0-4a64-90f2-d0e6a238a167/ca674796-50b0-4a64-90f2-d0e6a238a167.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.431285] env[61852]: DEBUG oslo_concurrency.lockutils [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ca674796-50b0-4a64-90f2-d0e6a238a167/ca674796-50b0-4a64-90f2-d0e6a238a167.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.431462] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 901.432184] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-85a70bbc-764f-4393-b578-ecaeaf4143a8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.442448] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 901.442448] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 901.444701] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4ee4cca-bae3-4aec-8835-4e784bffc340 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 901.450939] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){
[ 901.450939] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52452cc4-044b-0d82-14b1-b7d8df97b7bf"
[ 901.450939] env[61852]: _type = "Task"
[ 901.450939] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 901.460674] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52452cc4-044b-0d82-14b1-b7d8df97b7bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 901.493651] env[61852]: DEBUG nova.network.neutron [req-79e482f9-5c72-42ed-940e-e30343884a30 req-33965fae-fc27-4706-aed1-5335fa08e636 service nova] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Updated VIF entry in instance network info cache for port ad6990c8-dfec-404b-9e08-011ac672c222. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 901.494050] env[61852]: DEBUG nova.network.neutron [req-79e482f9-5c72-42ed-940e-e30343884a30 req-33965fae-fc27-4706-aed1-5335fa08e636 service nova] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Updating instance_info_cache with network_info: [{"id": "ad6990c8-dfec-404b-9e08-011ac672c222", "address": "fa:16:3e:02:d5:ea", "network": {"id": "5c538b43-cd66-41dd-b7f8-8d7f49060f2f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1279580713-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "783bc6968c91488293479f10b8dc92c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3e0aae3-33d1-403b-bfaf-306f77a1422e", "external-id": "nsx-vlan-transportzone-211", "segmentation_id": 211, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad6990c8-df", "ovs_interfaceid": "ad6990c8-dfec-404b-9e08-011ac672c222", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 901.655709] env[61852]: DEBUG nova.scheduler.client.report [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 901.662201] env[61852]: INFO nova.compute.manager [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Took 21.50 seconds to build instance.
[ 901.914710] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d8efef73-45ad-44b6-8030-6a201df21454 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "4fb68588-21a8-4004-9bbc-aa1655624bcb" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 901.962418] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Preparing fetch location {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 901.962867] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Fetch image to [datastore1] OSTACK_IMG_4fbd0340-a284-4ec4-b758-736509ef4c01/OSTACK_IMG_4fbd0340-a284-4ec4-b758-736509ef4c01.vmdk {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 901.962867] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Downloading stream optimized image ca674796-50b0-4a64-90f2-d0e6a238a167 to [datastore1] OSTACK_IMG_4fbd0340-a284-4ec4-b758-736509ef4c01/OSTACK_IMG_4fbd0340-a284-4ec4-b758-736509ef4c01.vmdk on the data store datastore1 as vApp {{(pid=61852) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}}
[ 901.963045] env[61852]: DEBUG nova.virt.vmwareapi.images [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Downloading image file data ca674796-50b0-4a64-90f2-d0e6a238a167 to the ESX as VM named 'OSTACK_IMG_4fbd0340-a284-4ec4-b758-736509ef4c01' {{(pid=61852) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}}
[ 902.005294] env[61852]: DEBUG oslo_concurrency.lockutils [req-79e482f9-5c72-42ed-940e-e30343884a30 req-33965fae-fc27-4706-aed1-5335fa08e636 service nova] Releasing lock "refresh_cache-6cb1968c-b951-4a83-a036-ba50b735133c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 902.039103] env[61852]: DEBUG oslo_vmware.rw_handles [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){
[ 902.039103] env[61852]: value = "resgroup-9"
[ 902.039103] env[61852]: _type = "ResourcePool"
[ 902.039103] env[61852]: }. {{(pid=61852) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}}
[ 902.039423] env[61852]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-f9169012-80e2-49ee-805c-23af5258107f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 902.061851] env[61852]: DEBUG oslo_vmware.rw_handles [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lease: (returnval){
[ 902.061851] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52ff24d7-8a2f-3fc1-91b4-711b6c369a32"
[ 902.061851] env[61852]: _type = "HttpNfcLease"
[ 902.061851] env[61852]: } obtained for vApp import into resource pool (val){
[ 902.061851] env[61852]: value = "resgroup-9"
[ 902.061851] env[61852]: _type = "ResourcePool"
[ 902.061851] env[61852]: }. {{(pid=61852) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}}
[ 902.062254] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the lease: (returnval){
[ 902.062254] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52ff24d7-8a2f-3fc1-91b4-711b6c369a32"
[ 902.062254] env[61852]: _type = "HttpNfcLease"
[ 902.062254] env[61852]: } to be ready. {{(pid=61852) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}}
[ 902.068831] env[61852]: DEBUG oslo_vmware.api [-] Lease: (returnval){
[ 902.068831] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52ff24d7-8a2f-3fc1-91b4-711b6c369a32"
[ 902.068831] env[61852]: _type = "HttpNfcLease"
[ 902.068831] env[61852]: } is initializing. {{(pid=61852) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}}
[ 902.161572] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.768s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 902.164182] env[61852]: DEBUG oslo_concurrency.lockutils [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.056s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 902.164435] env[61852]: DEBUG nova.objects.instance [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lazy-loading 'resources' on Instance uuid 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 902.165914] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e9a536c8-86c5-4304-8fb0-582d800fa5b3 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "4fb68588-21a8-4004-9bbc-aa1655624bcb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 23.014s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 902.166217] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d8efef73-45ad-44b6-8030-6a201df21454 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "4fb68588-21a8-4004-9bbc-aa1655624bcb" acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.252s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 902.166438] env[61852]: DEBUG nova.compute.manager [None req-d8efef73-45ad-44b6-8030-6a201df21454 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 902.167907] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b7860f-41d5-4f0a-953e-80f01325df30 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 902.177163] env[61852]: DEBUG nova.compute.manager [None req-d8efef73-45ad-44b6-8030-6a201df21454 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61852) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}}
[ 902.177529] env[61852]: DEBUG nova.objects.instance [None req-d8efef73-45ad-44b6-8030-6a201df21454 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lazy-loading 'flavor' on Instance uuid 4fb68588-21a8-4004-9bbc-aa1655624bcb {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 902.184360] env[61852]: INFO nova.scheduler.client.report [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Deleted allocations for instance eae1ad1f-f213-4227-93aa-b0ccf660e638
[ 902.417584] env[61852]: DEBUG nova.compute.manager [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 902.438941] env[61852]: DEBUG nova.virt.hardware [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=<?>,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-15T17:18:24Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 902.439220] env[61852]: DEBUG nova.virt.hardware [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 902.439436] env[61852]: DEBUG nova.virt.hardware [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 902.439562] env[61852]: DEBUG nova.virt.hardware [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 902.439711] env[61852]: DEBUG nova.virt.hardware [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 902.439866] env[61852]: DEBUG nova.virt.hardware [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 902.440141] env[61852]: DEBUG nova.virt.hardware [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 902.440316] env[61852]: DEBUG nova.virt.hardware [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 902.440486] env[61852]: DEBUG nova.virt.hardware [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 902.440654] env[61852]: DEBUG nova.virt.hardware [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 902.440833] env[61852]: DEBUG nova.virt.hardware [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 902.441696] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-257837ba-62cc-4329-bcab-d89fe21c24ed {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 902.450680] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec24523f-caaa-462e-aa5e-81983b0cc8ec {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 902.571137] env[61852]: DEBUG oslo_vmware.api [-] Lease: (returnval){
[ 902.571137] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52ff24d7-8a2f-3fc1-91b4-711b6c369a32"
[ 902.571137] env[61852]: _type = "HttpNfcLease"
[ 902.571137] env[61852]: } is initializing. {{(pid=61852) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}}
[ 902.683684] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8efef73-45ad-44b6-8030-6a201df21454 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 902.683684] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2d25e972-2fa0-40ae-b631-b31ce022d6ab {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 902.695208] env[61852]: DEBUG oslo_vmware.api [None req-d8efef73-45ad-44b6-8030-6a201df21454 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){
[ 902.695208] env[61852]: value = "task-1293109"
[ 902.695208] env[61852]: _type = "Task"
[ 902.695208] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 902.696195] env[61852]: DEBUG nova.network.neutron [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Successfully updated port: 61e94b93-d030-4c70-8ffc-ce81cbf29d01 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 902.697747] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e89d4297-adcd-4cb1-9739-99d5b4d6ba0f tempest-ServersTestMultiNic-1113767641 tempest-ServersTestMultiNic-1113767641-project-member] Lock "eae1ad1f-f213-4227-93aa-b0ccf660e638" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 18.167s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 902.713267] env[61852]: DEBUG oslo_vmware.api [None req-d8efef73-45ad-44b6-8030-6a201df21454 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293109, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 902.727793] env[61852]: DEBUG nova.compute.manager [req-829ee956-0688-4a72-bb4b-2bb048e3bd04 req-521e8066-ddb9-4d0d-b144-1986259df112 service nova] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Received event network-vif-plugged-61e94b93-d030-4c70-8ffc-ce81cbf29d01 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 902.728034] env[61852]: DEBUG oslo_concurrency.lockutils [req-829ee956-0688-4a72-bb4b-2bb048e3bd04 req-521e8066-ddb9-4d0d-b144-1986259df112 service nova] Acquiring lock "51ecc9c3-a3fc-4bd7-8c90-003451700212-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 902.728245] env[61852]: DEBUG oslo_concurrency.lockutils [req-829ee956-0688-4a72-bb4b-2bb048e3bd04 req-521e8066-ddb9-4d0d-b144-1986259df112 service nova] Lock "51ecc9c3-a3fc-4bd7-8c90-003451700212-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 902.728413] env[61852]: DEBUG oslo_concurrency.lockutils [req-829ee956-0688-4a72-bb4b-2bb048e3bd04 req-521e8066-ddb9-4d0d-b144-1986259df112 service nova] Lock "51ecc9c3-a3fc-4bd7-8c90-003451700212-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 902.728581] env[61852]: DEBUG nova.compute.manager [req-829ee956-0688-4a72-bb4b-2bb048e3bd04 req-521e8066-ddb9-4d0d-b144-1986259df112 service nova] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] No waiting events found dispatching network-vif-plugged-61e94b93-d030-4c70-8ffc-ce81cbf29d01 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 902.728747] env[61852]: WARNING nova.compute.manager [req-829ee956-0688-4a72-bb4b-2bb048e3bd04 req-521e8066-ddb9-4d0d-b144-1986259df112 service nova] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Received unexpected event network-vif-plugged-61e94b93-d030-4c70-8ffc-ce81cbf29d01 for instance with vm_state building and task_state spawning.
[ 902.728944] env[61852]: DEBUG nova.compute.manager [req-829ee956-0688-4a72-bb4b-2bb048e3bd04 req-521e8066-ddb9-4d0d-b144-1986259df112 service nova] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Received event network-changed-61e94b93-d030-4c70-8ffc-ce81cbf29d01 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 902.729119] env[61852]: DEBUG nova.compute.manager [req-829ee956-0688-4a72-bb4b-2bb048e3bd04 req-521e8066-ddb9-4d0d-b144-1986259df112 service nova] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Refreshing instance network info cache due to event network-changed-61e94b93-d030-4c70-8ffc-ce81cbf29d01. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 902.729303] env[61852]: DEBUG oslo_concurrency.lockutils [req-829ee956-0688-4a72-bb4b-2bb048e3bd04 req-521e8066-ddb9-4d0d-b144-1986259df112 service nova] Acquiring lock "refresh_cache-51ecc9c3-a3fc-4bd7-8c90-003451700212" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 902.729441] env[61852]: DEBUG oslo_concurrency.lockutils [req-829ee956-0688-4a72-bb4b-2bb048e3bd04 req-521e8066-ddb9-4d0d-b144-1986259df112 service nova] Acquired lock "refresh_cache-51ecc9c3-a3fc-4bd7-8c90-003451700212" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 902.729597] env[61852]: DEBUG nova.network.neutron [req-829ee956-0688-4a72-bb4b-2bb048e3bd04 req-521e8066-ddb9-4d0d-b144-1986259df112 service nova] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Refreshing network info cache for port 61e94b93-d030-4c70-8ffc-ce81cbf29d01 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 902.847692] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3376926a-3de9-455b-b0ec-127630ca4f13 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 902.856292] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0733edf6-7739-4d78-9891-4a4002f460ca {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 902.887485] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-773b714a-e31d-44b8-8b82-bf922f2981ab {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 902.896670] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff7a0b2-317b-4e7f-8b18-9340969f16dc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 902.911907] env[61852]: DEBUG nova.compute.provider_tree [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 903.071451] env[61852]: DEBUG oslo_vmware.api [-] Lease: (returnval){
[ 903.071451] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52ff24d7-8a2f-3fc1-91b4-711b6c369a32"
[ 903.071451] env[61852]: _type = "HttpNfcLease"
[ 903.071451] env[61852]: } is initializing. {{(pid=61852) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}}
[ 903.199436] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "refresh_cache-51ecc9c3-a3fc-4bd7-8c90-003451700212" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 903.209487] env[61852]: DEBUG oslo_vmware.api [None req-d8efef73-45ad-44b6-8030-6a201df21454 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293109, 'name': PowerOffVM_Task, 'duration_secs': 0.181612} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 903.209791] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8efef73-45ad-44b6-8030-6a201df21454 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 903.209985] env[61852]: DEBUG nova.compute.manager [None req-d8efef73-45ad-44b6-8030-6a201df21454 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 903.210793] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-590360d0-eb51-412c-97cb-b5eb2c8cf8c6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 903.277101] env[61852]: DEBUG nova.network.neutron [req-829ee956-0688-4a72-bb4b-2bb048e3bd04 req-521e8066-ddb9-4d0d-b144-1986259df112 service nova] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 903.411045] env[61852]: DEBUG nova.network.neutron [req-829ee956-0688-4a72-bb4b-2bb048e3bd04 req-521e8066-ddb9-4d0d-b144-1986259df112 service nova] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 903.415520] env[61852]: DEBUG nova.scheduler.client.report [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 903.571090] env[61852]: DEBUG oslo_vmware.api [-] Lease: (returnval){
[ 903.571090] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52ff24d7-8a2f-3fc1-91b4-711b6c369a32"
[ 903.571090] env[61852]: _type = "HttpNfcLease"
[ 903.571090] env[61852]: } is ready. {{(pid=61852) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}}
[ 903.571503] env[61852]: DEBUG oslo_vmware.rw_handles [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Invoking VIM API for reading info of lease: (returnval){
[ 903.571503] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52ff24d7-8a2f-3fc1-91b4-711b6c369a32"
[ 903.571503] env[61852]: _type = "HttpNfcLease"
[ 903.571503] env[61852]: }. {{(pid=61852) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}}
[ 903.572110] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b11e8a5-d1a3-4491-b5b0-875480b00681 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 903.579655] env[61852]: DEBUG oslo_vmware.rw_handles [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5292b03f-1b66-22c4-b3c2-d3291ad45e55/disk-0.vmdk from lease info. {{(pid=61852) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}}
[ 903.579805] env[61852]: DEBUG oslo_vmware.rw_handles [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5292b03f-1b66-22c4-b3c2-d3291ad45e55/disk-0.vmdk. {{(pid=61852) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 903.643383] env[61852]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-9986f784-1de8-4677-97c8-7207d7f6444e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 903.722788] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d8efef73-45ad-44b6-8030-6a201df21454 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "4fb68588-21a8-4004-9bbc-aa1655624bcb" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 1.556s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 903.913397] env[61852]: DEBUG oslo_concurrency.lockutils [req-829ee956-0688-4a72-bb4b-2bb048e3bd04 req-521e8066-ddb9-4d0d-b144-1986259df112 service nova] Releasing lock "refresh_cache-51ecc9c3-a3fc-4bd7-8c90-003451700212" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 903.914810] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired lock "refresh_cache-51ecc9c3-a3fc-4bd7-8c90-003451700212" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 903.914984] env[61852]: DEBUG nova.network.neutron [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 903.919591] env[61852]: DEBUG oslo_concurrency.lockutils [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.755s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 903.922823] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 13.902s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 903.945126] env[61852]: INFO nova.scheduler.client.report [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Deleted allocations for instance 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92
[ 904.345578] env[61852]: DEBUG oslo_vmware.rw_handles [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Completed reading data from the image iterator. {{(pid=61852) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 904.345807] env[61852]: DEBUG oslo_vmware.rw_handles [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5292b03f-1b66-22c4-b3c2-d3291ad45e55/disk-0.vmdk. {{(pid=61852) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}}
[ 904.346759] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b29e92a9-4516-4d2e-bc3f-89795e26cea1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 904.354174] env[61852]: DEBUG oslo_vmware.rw_handles [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5292b03f-1b66-22c4-b3c2-d3291ad45e55/disk-0.vmdk is in state: ready. {{(pid=61852) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}}
[ 904.354379] env[61852]: DEBUG oslo_vmware.rw_handles [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5292b03f-1b66-22c4-b3c2-d3291ad45e55/disk-0.vmdk. {{(pid=61852) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}}
[ 904.354638] env[61852]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-00231295-1922-4f21-af83-4742c1a881e4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 904.358754] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0162e782-4d25-42a9-8403-6de900395479 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "4fb68588-21a8-4004-9bbc-aa1655624bcb" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 904.359007] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0162e782-4d25-42a9-8403-6de900395479 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "4fb68588-21a8-4004-9bbc-aa1655624bcb" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 904.359229] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0162e782-4d25-42a9-8403-6de900395479 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "4fb68588-21a8-4004-9bbc-aa1655624bcb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 904.359415] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0162e782-4d25-42a9-8403-6de900395479 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "4fb68588-21a8-4004-9bbc-aa1655624bcb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 904.359600] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0162e782-4d25-42a9-8403-6de900395479 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "4fb68588-21a8-4004-9bbc-aa1655624bcb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 904.361756] env[61852]: INFO nova.compute.manager [None req-0162e782-4d25-42a9-8403-6de900395479 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Terminating instance
[ 904.363782] env[61852]: DEBUG nova.compute.manager [None req-0162e782-4d25-42a9-8403-6de900395479 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 904.364057] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-0162e782-4d25-42a9-8403-6de900395479 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 904.364962] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b916be5-925c-4267-8c49-ec688c848a1e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 904.373379] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-0162e782-4d25-42a9-8403-6de900395479 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 904.373634] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-36b1ca54-dd85-4a5d-842e-231e80a2b509 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 904.440041] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-0162e782-4d25-42a9-8403-6de900395479 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 904.440041] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-0162e782-4d25-42a9-8403-6de900395479 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 904.440273] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-0162e782-4d25-42a9-8403-6de900395479 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Deleting the datastore file [datastore1] 4fb68588-21a8-4004-9bbc-aa1655624bcb {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 904.440454] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-849b18b0-0577-45c6-b433-9870ecb2aef2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 904.447565] env[61852]: DEBUG oslo_vmware.api [None req-0162e782-4d25-42a9-8403-6de900395479 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){
[ 904.447565] env[61852]: value = "task-1293111"
[ 904.447565] env[61852]: _type = "Task"
[ 904.447565] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 904.454637] env[61852]: DEBUG oslo_concurrency.lockutils [None req-81a422f2-ce90-46d6-a675-275803516956 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "4b85f2d7-d99a-4332-a78c-3f2a50c7cb92" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 18.789s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 904.459226] env[61852]: DEBUG nova.network.neutron [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 904.462818] env[61852]: DEBUG oslo_vmware.api [None req-0162e782-4d25-42a9-8403-6de900395479 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293111, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 904.519252] env[61852]: DEBUG oslo_vmware.rw_handles [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5292b03f-1b66-22c4-b3c2-d3291ad45e55/disk-0.vmdk. {{(pid=61852) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}}
[ 904.519556] env[61852]: INFO nova.virt.vmwareapi.images [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Downloaded image file data ca674796-50b0-4a64-90f2-d0e6a238a167
[ 904.520785] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c2f1654-765c-404b-865d-13e55913c4b2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 904.539643] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7e3dfeb3-2faf-474c-8381-f30cabb96815 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 904.595451] env[61852]: INFO nova.virt.vmwareapi.images [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] The imported VM was unregistered
[ 904.598601] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Caching image {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 904.599061] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Creating directory with path [datastore1] devstack-image-cache_base/ca674796-50b0-4a64-90f2-d0e6a238a167 {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 904.599449] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d1f1fc79-0421-4eb6-a839-46a2e6bb4661 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 904.616177] env[61852]: DEBUG nova.network.neutron [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Updating instance_info_cache with network_info: [{"id": "61e94b93-d030-4c70-8ffc-ce81cbf29d01", "address": "fa:16:3e:23:a2:0c", "network": {"id": "37c975fc-d484-4e07-82b4-dc10db3dab61", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2132613748-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14a017ea2b084ae0ad2994dda7809c7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61e94b93-d0", "ovs_interfaceid": "61e94b93-d030-4c70-8ffc-ce81cbf29d01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 904.630940] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Created directory with path [datastore1] devstack-image-cache_base/ca674796-50b0-4a64-90f2-d0e6a238a167 {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 904.631155] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_4fbd0340-a284-4ec4-b758-736509ef4c01/OSTACK_IMG_4fbd0340-a284-4ec4-b758-736509ef4c01.vmdk to [datastore1] devstack-image-cache_base/ca674796-50b0-4a64-90f2-d0e6a238a167/ca674796-50b0-4a64-90f2-d0e6a238a167.vmdk. {{(pid=61852) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}}
[ 904.631417] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-a974299e-e0bf-4fb1-ad4d-a95d2cbecee6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 904.640921] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){
[ 904.640921] env[61852]: value = "task-1293113"
[ 904.640921] env[61852]: _type = "Task"
[ 904.640921] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 904.649981] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293113, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 904.956261] env[61852]: WARNING nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance f18906e9-67b3-4537-9169-9d275e2ec4e4 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do.
[ 904.956533] env[61852]: WARNING nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 8d733f93-7636-447b-a5d5-53c16c30061f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do.
[ 904.956668] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 8d8679db-eb9d-45c1-b053-70378f58e273 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 904.956790] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 254919cb-e3cd-4288-8696-95e632d78a38 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 904.956921] env[61852]: WARNING nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 12e431d3-4c23-4f4c-a619-f0b69a0e31e8 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do.
[ 904.957098] env[61852]: WARNING nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance b44c9cc0-3f2b-495a-87ee-f03de8dcec3c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do.
[ 904.957234] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 4fb68588-21a8-4004-9bbc-aa1655624bcb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 904.957353] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 6cb1968c-b951-4a83-a036-ba50b735133c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 904.957466] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 51ecc9c3-a3fc-4bd7-8c90-003451700212 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 904.962448] env[61852]: DEBUG oslo_vmware.api [None req-0162e782-4d25-42a9-8403-6de900395479 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293111, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.41593} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 904.962781] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-0162e782-4d25-42a9-8403-6de900395479 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 904.963079] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-0162e782-4d25-42a9-8403-6de900395479 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 904.963278] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-0162e782-4d25-42a9-8403-6de900395479 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 904.963790] env[61852]: INFO nova.compute.manager [None req-0162e782-4d25-42a9-8403-6de900395479 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Took 0.60 seconds to destroy the instance on the hypervisor.
[ 904.963790] env[61852]: DEBUG oslo.service.loopingcall [None req-0162e782-4d25-42a9-8403-6de900395479 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 904.964064] env[61852]: DEBUG nova.compute.manager [-] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 904.964102] env[61852]: DEBUG nova.network.neutron [-] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 905.119334] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Releasing lock "refresh_cache-51ecc9c3-a3fc-4bd7-8c90-003451700212" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 905.119661] env[61852]: DEBUG nova.compute.manager [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Instance network_info: |[{"id": "61e94b93-d030-4c70-8ffc-ce81cbf29d01", "address": "fa:16:3e:23:a2:0c", "network": {"id": "37c975fc-d484-4e07-82b4-dc10db3dab61", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2132613748-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14a017ea2b084ae0ad2994dda7809c7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61e94b93-d0", "ovs_interfaceid": "61e94b93-d030-4c70-8ffc-ce81cbf29d01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}}
[ 905.120144] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:a2:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51ae336c-12cf-406a-b1ca-54e9ce553b3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '61e94b93-d030-4c70-8ffc-ce81cbf29d01', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 905.128205] env[61852]: DEBUG oslo.service.loopingcall [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 905.128999] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 905.129789] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a39d88bc-d122-413a-a4ec-26cf45ba681a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 905.157974] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293113, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 905.159678] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 905.159678] env[61852]: value = "task-1293114"
[ 905.159678] env[61852]: _type = "Task"
[ 905.159678] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 905.170886] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293114, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 905.465849] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 4623565b-cd36-498c-a0e9-c3b1c6ef479b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 905.538843] env[61852]: DEBUG nova.compute.manager [req-06e620fe-6fc8-4110-9eb0-c69dd8771ba4 req-6cc09ecc-1e2c-457d-a2e7-634cfb75fab0 service nova] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Received event network-vif-deleted-9dd9d717-68e4-4f79-99f4-be48ad14d8b1 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 905.538843] env[61852]: INFO nova.compute.manager [req-06e620fe-6fc8-4110-9eb0-c69dd8771ba4 req-6cc09ecc-1e2c-457d-a2e7-634cfb75fab0 service nova] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Neutron deleted interface 9dd9d717-68e4-4f79-99f4-be48ad14d8b1; detaching it from the instance and deleting it from the info cache
[ 905.538843] env[61852]: DEBUG nova.network.neutron [req-06e620fe-6fc8-4110-9eb0-c69dd8771ba4 req-6cc09ecc-1e2c-457d-a2e7-634cfb75fab0 service nova] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 905.674313] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293113, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 905.684048] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293114, 'name': CreateVM_Task, 'duration_secs': 0.430596} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 905.684048] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 905.684048] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 905.684048] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 905.684048] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 905.684048] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7304818d-2525-4124-86b4-defb32e8bd8d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 905.690468] env[61852]: DEBUG oslo_vmware.api [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){
[ 905.690468] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]525cff00-d847-3c8f-730a-9b6b38c1dc43"
[ 905.690468] env[61852]: _type = "Task"
[ 905.690468] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 905.699386] env[61852]: DEBUG oslo_vmware.api [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]525cff00-d847-3c8f-730a-9b6b38c1dc43, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 905.967958] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance e97448d7-0162-44bf-95d1-93bdcbcaec25 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 905.968281] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=61852) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 905.968443] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=61852) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 906.011399] env[61852]: DEBUG nova.network.neutron [-] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 906.043089] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-df492d38-1465-4ade-aa8d-2496c95b97a7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 906.057151] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5c13b82-91b9-4219-942a-93337716dc40 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 906.090937] env[61852]: DEBUG nova.compute.manager [req-06e620fe-6fc8-4110-9eb0-c69dd8771ba4 req-6cc09ecc-1e2c-457d-a2e7-634cfb75fab0 service nova] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Detach interface failed, port_id=9dd9d717-68e4-4f79-99f4-be48ad14d8b1, reason: Instance 4fb68588-21a8-4004-9bbc-aa1655624bcb could not be found. {{(pid=61852) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}}
[ 906.162234] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293113, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 906.182310] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce0e070-9f9a-42c3-bf64-87bff299a6bd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 906.189822] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cfe3bbf-04c9-45c1-b560-a79b2bde274f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 906.233884] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26692472-0937-457b-96ab-82b3101ae4a3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 906.237478] env[61852]: DEBUG oslo_vmware.api [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]525cff00-d847-3c8f-730a-9b6b38c1dc43, 'name': SearchDatastore_Task, 'duration_secs': 0.086247} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 906.238039] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 906.238505] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 906.238902] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 906.239197] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 906.239525] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 906.240336] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-15bee84d-e72b-4b1a-a32e-e3623ce13c4d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 906.247950] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc4d4f4c-0db3-47fd-bbe6-65b718f64601 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 906.266143] env[61852]: DEBUG nova.compute.provider_tree [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 906.267484] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 906.267877] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Folder
[datastore1] devstack-image-cache_base created. {{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 906.269187] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-152f7bc2-334d-4803-991e-86301f36ddc0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.276634] env[61852]: DEBUG oslo_vmware.api [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 906.276634] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5253c76c-6d0e-a6cd-e993-b3cab017d90e" [ 906.276634] env[61852]: _type = "Task" [ 906.276634] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.289171] env[61852]: DEBUG oslo_vmware.api [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5253c76c-6d0e-a6cd-e993-b3cab017d90e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.515035] env[61852]: INFO nova.compute.manager [-] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Took 1.55 seconds to deallocate network for instance. [ 906.661822] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293113, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.771048] env[61852]: DEBUG nova.scheduler.client.report [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 906.788528] env[61852]: DEBUG oslo_vmware.api [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5253c76c-6d0e-a6cd-e993-b3cab017d90e, 'name': SearchDatastore_Task, 'duration_secs': 0.090135} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.789431] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a10365ea-86fc-4cf2-a274-d4e4c35dcf64 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.798912] env[61852]: DEBUG oslo_vmware.api [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 906.798912] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d5748d-09e9-9892-d175-d7c46e373f98" [ 906.798912] env[61852]: _type = "Task" [ 906.798912] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.813512] env[61852]: DEBUG oslo_vmware.api [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d5748d-09e9-9892-d175-d7c46e373f98, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.021338] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0162e782-4d25-42a9-8403-6de900395479 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.161431] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293113, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.421583} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.161709] env[61852]: INFO nova.virt.vmwareapi.ds_util [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_4fbd0340-a284-4ec4-b758-736509ef4c01/OSTACK_IMG_4fbd0340-a284-4ec4-b758-736509ef4c01.vmdk to [datastore1] devstack-image-cache_base/ca674796-50b0-4a64-90f2-d0e6a238a167/ca674796-50b0-4a64-90f2-d0e6a238a167.vmdk. 
[ 907.161909] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Cleaning up location [datastore1] OSTACK_IMG_4fbd0340-a284-4ec4-b758-736509ef4c01 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 907.162094] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_4fbd0340-a284-4ec4-b758-736509ef4c01 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 907.162357] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-192758ae-534c-4487-bbb7-daf07cb18709 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.168682] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 907.168682] env[61852]: value = "task-1293115" [ 907.168682] env[61852]: _type = "Task" [ 907.168682] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.176397] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293115, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.276562] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61852) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 907.276718] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.354s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.276997] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.430s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.278656] env[61852]: INFO nova.compute.claims [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 907.310283] env[61852]: DEBUG oslo_vmware.api [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d5748d-09e9-9892-d175-d7c46e373f98, 'name': 
SearchDatastore_Task, 'duration_secs': 0.081227} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.310571] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.310836] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 51ecc9c3-a3fc-4bd7-8c90-003451700212/51ecc9c3-a3fc-4bd7-8c90-003451700212.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 907.311121] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-79d3bb5e-f92b-461f-b7f6-8b934ecb5b07 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.318564] env[61852]: DEBUG oslo_vmware.api [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 907.318564] env[61852]: value = "task-1293116" [ 907.318564] env[61852]: _type = "Task" [ 907.318564] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.327583] env[61852]: DEBUG oslo_vmware.api [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293116, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.680455] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293115, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174688} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.680858] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 907.681093] env[61852]: DEBUG oslo_concurrency.lockutils [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ca674796-50b0-4a64-90f2-d0e6a238a167/ca674796-50b0-4a64-90f2-d0e6a238a167.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.681353] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ca674796-50b0-4a64-90f2-d0e6a238a167/ca674796-50b0-4a64-90f2-d0e6a238a167.vmdk to [datastore1] 6cb1968c-b951-4a83-a036-ba50b735133c/6cb1968c-b951-4a83-a036-ba50b735133c.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 907.681646] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f9254812-6f8f-487e-8ae3-cf30ef9028fa {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.690797] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 907.690797] env[61852]: value = "task-1293117" [ 907.690797] env[61852]: _type = "Task" [ 907.690797] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.699763] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293117, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.834673] env[61852]: DEBUG oslo_vmware.api [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293116, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.203207] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293117, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.333202] env[61852]: DEBUG oslo_vmware.api [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293116, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.544018} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.333521] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 51ecc9c3-a3fc-4bd7-8c90-003451700212/51ecc9c3-a3fc-4bd7-8c90-003451700212.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 908.333758] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 908.334080] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-93f33dcc-abf1-4f1d-9d93-99ccd6ddbe7c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.343476] env[61852]: DEBUG oslo_vmware.api [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 908.343476] env[61852]: value = "task-1293118" [ 908.343476] env[61852]: _type = "Task" [ 908.343476] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.355703] env[61852]: DEBUG oslo_vmware.api [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293118, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.461755] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b608f6f-c558-4308-b87b-03279a49d194 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.470511] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-530f17c4-d309-40d3-9d04-148a0961ea8b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.507588] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3a0d902-663a-4fc0-9a97-e221ee85aceb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.518059] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f495478d-b435-4a0d-b40f-bbabef5262e6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.536155] env[61852]: DEBUG nova.compute.provider_tree [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 908.702478] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293117, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.853990] env[61852]: DEBUG oslo_vmware.api [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293118, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.137989} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.854408] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 908.855135] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-856841c7-917d-482b-a997-d6484985ec46 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.879327] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] 51ecc9c3-a3fc-4bd7-8c90-003451700212/51ecc9c3-a3fc-4bd7-8c90-003451700212.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 908.879675] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dc25ef5e-2486-4db1-8686-9be300d2887a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.900351] env[61852]: DEBUG oslo_vmware.api [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 908.900351] env[61852]: value = "task-1293119" [ 908.900351] env[61852]: _type = "Task" [ 908.900351] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.915263] env[61852]: DEBUG oslo_vmware.api [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293119, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.040404] env[61852]: DEBUG nova.scheduler.client.report [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 909.202349] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293117, 'name': CopyVirtualDisk_Task} progress is 63%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.410763] env[61852]: DEBUG oslo_vmware.api [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293119, 'name': ReconfigVM_Task, 'duration_secs': 0.304072} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.411637] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Reconfigured VM instance instance-00000052 to attach disk [datastore1] 51ecc9c3-a3fc-4bd7-8c90-003451700212/51ecc9c3-a3fc-4bd7-8c90-003451700212.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 909.412045] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-31720a72-24a2-40f1-af64-4c0eff5985af {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.419495] env[61852]: DEBUG oslo_vmware.api [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 909.419495] env[61852]: value = "task-1293120" [ 909.419495] env[61852]: _type = "Task" [ 909.419495] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.433348] env[61852]: DEBUG oslo_vmware.api [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293120, 'name': Rename_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.545606] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.268s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.546201] env[61852]: DEBUG nova.compute.manager [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 909.549123] env[61852]: DEBUG oslo_concurrency.lockutils [None req-16d10ee7-ec5f-45c2-9507-f8785a83d795 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.353s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.549342] env[61852]: DEBUG oslo_concurrency.lockutils [None req-16d10ee7-ec5f-45c2-9507-f8785a83d795 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.551382] env[61852]: DEBUG oslo_concurrency.lockutils [None req-eab8cd18-6fa4-43f5-b0a3-9b08eaab6bf0 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.379s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.551588] env[61852]: DEBUG oslo_concurrency.lockutils [None req-eab8cd18-6fa4-43f5-b0a3-9b08eaab6bf0 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.553457] env[61852]: DEBUG oslo_concurrency.lockutils [None req-da3547a8-ad70-44f2-bf0f-d9ea3547cce9 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.517s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.553630] env[61852]: DEBUG oslo_concurrency.lockutils [None req-da3547a8-ad70-44f2-bf0f-d9ea3547cce9 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.555546] env[61852]: DEBUG oslo_concurrency.lockutils [None req-60f2f3e4-66a7-49a9-8121-cff3dc7481cb tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.455s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.555770] env[61852]: DEBUG oslo_concurrency.lockutils [None req-60f2f3e4-66a7-49a9-8121-cff3dc7481cb tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.557640] env[61852]: DEBUG oslo_concurrency.lockutils [None 
req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.115s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.559573] env[61852]: INFO nova.compute.claims [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 909.584282] env[61852]: INFO nova.scheduler.client.report [None req-60f2f3e4-66a7-49a9-8121-cff3dc7481cb tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Deleted allocations for instance 8d733f93-7636-447b-a5d5-53c16c30061f [ 909.587990] env[61852]: INFO nova.scheduler.client.report [None req-da3547a8-ad70-44f2-bf0f-d9ea3547cce9 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Deleted allocations for instance b44c9cc0-3f2b-495a-87ee-f03de8dcec3c [ 909.597825] env[61852]: INFO nova.scheduler.client.report [None req-eab8cd18-6fa4-43f5-b0a3-9b08eaab6bf0 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Deleted allocations for instance f18906e9-67b3-4537-9169-9d275e2ec4e4 [ 909.599441] env[61852]: INFO nova.scheduler.client.report [None req-16d10ee7-ec5f-45c2-9507-f8785a83d795 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Deleted allocations for instance 12e431d3-4c23-4f4c-a619-f0b69a0e31e8 [ 909.702802] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293117, 'name': CopyVirtualDisk_Task} progress is 83%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.930292] env[61852]: DEBUG oslo_vmware.api [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293120, 'name': Rename_Task, 'duration_secs': 0.146702} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.930677] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 909.930795] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ca655a8b-2b19-4691-a647-59f9b04ab7cc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.937053] env[61852]: DEBUG oslo_vmware.api [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 909.937053] env[61852]: value = "task-1293121" [ 909.937053] env[61852]: _type = "Task" [ 909.937053] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.945313] env[61852]: DEBUG oslo_vmware.api [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293121, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.065395] env[61852]: DEBUG nova.compute.utils [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 910.069687] env[61852]: DEBUG nova.compute.manager [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 910.069687] env[61852]: DEBUG nova.network.neutron [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 910.097201] env[61852]: DEBUG oslo_concurrency.lockutils [None req-60f2f3e4-66a7-49a9-8121-cff3dc7481cb tempest-ServerRescueTestJSON-832936413 tempest-ServerRescueTestJSON-832936413-project-member] Lock "8d733f93-7636-447b-a5d5-53c16c30061f" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 14.932s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.099639] env[61852]: DEBUG oslo_concurrency.lockutils [None req-da3547a8-ad70-44f2-bf0f-d9ea3547cce9 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "b44c9cc0-3f2b-495a-87ee-f03de8dcec3c" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 16.958s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.105776] env[61852]: DEBUG oslo_concurrency.lockutils [None req-eab8cd18-6fa4-43f5-b0a3-9b08eaab6bf0 tempest-ListImageFiltersTestJSON-485202078 tempest-ListImageFiltersTestJSON-485202078-project-member] Lock "f18906e9-67b3-4537-9169-9d275e2ec4e4" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 19.064s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.108796] env[61852]: DEBUG oslo_concurrency.lockutils [None req-16d10ee7-ec5f-45c2-9507-f8785a83d795 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lock "12e431d3-4c23-4f4c-a619-f0b69a0e31e8" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 20.227s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.116920] env[61852]: DEBUG nova.policy [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member]
Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0f04d129452d4eb79514c52a6972af0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e93a6965a6884292bc56b01f7d54a622', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 910.204337] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293117, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.368886} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.204608] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ca674796-50b0-4a64-90f2-d0e6a238a167/ca674796-50b0-4a64-90f2-d0e6a238a167.vmdk to [datastore1] 6cb1968c-b951-4a83-a036-ba50b735133c/6cb1968c-b951-4a83-a036-ba50b735133c.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 910.205449] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-875a32d1-442e-4f2e-8367-af3521ea21f5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.230038] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] 6cb1968c-b951-4a83-a036-ba50b735133c/6cb1968c-b951-4a83-a036-ba50b735133c.vmdk or device None with type streamOptimized {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 910.230413] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-041f8ccb-3523-4f35-9833-b9d0d8eeaee7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.251225] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 910.251225] env[61852]: value = "task-1293122" [ 910.251225] env[61852]: _type = "Task" [ 910.251225] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.259715] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293122, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.419629] env[61852]: DEBUG nova.network.neutron [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Successfully created port: 6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 910.448042] env[61852]: DEBUG oslo_vmware.api [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293121, 'name': PowerOnVM_Task, 'duration_secs': 0.465323} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.448307] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 910.448548] env[61852]: INFO nova.compute.manager [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Took 8.03 seconds to spawn the instance on the hypervisor. [ 910.448758] env[61852]: DEBUG nova.compute.manager [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 910.450071] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7038e641-9b9e-444b-8bb0-b382b20a077c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.569699] env[61852]: DEBUG nova.compute.manager [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 910.708620] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ee1f5ee-f790-4d97-942c-07175c6723c7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.716297] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc67e8f7-1b3b-4214-be8b-c12ea2bf8e2d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.747942] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea4f02b8-a9b4-4273-bc5c-e2d6705d4d48 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.761129] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6deea681-5250-4790-a897-7dc85f6a0db2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.769214] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293122, 'name': ReconfigVM_Task, 'duration_secs': 0.288212} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.769897] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Reconfigured VM instance instance-00000051 to attach disk [datastore1] 6cb1968c-b951-4a83-a036-ba50b735133c/6cb1968c-b951-4a83-a036-ba50b735133c.vmdk or device None with type streamOptimized {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 910.770536] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a3a6d4ef-b75a-4016-8925-e20a7e22c979 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.780962] env[61852]: DEBUG nova.compute.provider_tree [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 910.787112] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 910.787112] env[61852]: value = "task-1293123" [ 910.787112] env[61852]: _type = "Task" [ 910.787112] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.800954] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293123, 'name': Rename_Task} progress is 6%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.981747] env[61852]: INFO nova.compute.manager [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Took 24.05 seconds to build instance. [ 911.178918] env[61852]: DEBUG oslo_concurrency.lockutils [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "cc5e0467-2960-43a1-bd7b-a528d5788028" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.179280] env[61852]: DEBUG oslo_concurrency.lockutils [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "cc5e0467-2960-43a1-bd7b-a528d5788028" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.284378] env[61852]: DEBUG nova.scheduler.client.report [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 911.297851] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293123, 'name': Rename_Task} progress is 99%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.482557] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e63ea533-2465-40f9-bbdb-a881f10aa553 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "51ecc9c3-a3fc-4bd7-8c90-003451700212" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 25.574s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.585758] env[61852]: DEBUG nova.compute.manager [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Start spawning the instance on the hypervisor.
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 911.613310] env[61852]: DEBUG nova.virt.hardware [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 911.613618] env[61852]: DEBUG nova.virt.hardware [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 911.613817] env[61852]: DEBUG nova.virt.hardware [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 911.614057] env[61852]: DEBUG nova.virt.hardware [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 911.614259] env[61852]: DEBUG nova.virt.hardware [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 911.614454] env[61852]: DEBUG nova.virt.hardware [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 911.614709] env[61852]: DEBUG nova.virt.hardware [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 911.614942] env[61852]: DEBUG nova.virt.hardware [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 911.615219] env[61852]: DEBUG 
nova.virt.hardware [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 911.615457] env[61852]: DEBUG nova.virt.hardware [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 911.615678] env[61852]: DEBUG nova.virt.hardware [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 911.616630] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6ab3e15-8d07-4f93-993f-25639fcaee38 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.625213] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76e40993-4c31-4499-85d0-a4011f4d74c8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.682659] env[61852]: DEBUG nova.compute.manager [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 911.793498] env[61852]: DEBUG oslo_concurrency.lockutils [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.236s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.794013] env[61852]: DEBUG nova.compute.manager [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 911.807228] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0162e782-4d25-42a9-8403-6de900395479 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.786s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.807665] env[61852]: DEBUG nova.objects.instance [None req-0162e782-4d25-42a9-8403-6de900395479 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lazy-loading 'resources' on Instance uuid 4fb68588-21a8-4004-9bbc-aa1655624bcb {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 911.808809] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293123, 'name': Rename_Task} progress is 99%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.995148] env[61852]: DEBUG nova.compute.manager [req-2ed9f504-b471-4b6b-a3f0-5fabcb6efcd4 req-46187cd9-9133-44e1-a58b-5a10a6430eab service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Received event network-vif-plugged-6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 911.995577] env[61852]: DEBUG oslo_concurrency.lockutils [req-2ed9f504-b471-4b6b-a3f0-5fabcb6efcd4 req-46187cd9-9133-44e1-a58b-5a10a6430eab service nova] Acquiring lock "4623565b-cd36-498c-a0e9-c3b1c6ef479b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.996163] env[61852]: DEBUG oslo_concurrency.lockutils [req-2ed9f504-b471-4b6b-a3f0-5fabcb6efcd4 req-46187cd9-9133-44e1-a58b-5a10a6430eab service nova] Lock "4623565b-cd36-498c-a0e9-c3b1c6ef479b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.996877] env[61852]: DEBUG oslo_concurrency.lockutils [req-2ed9f504-b471-4b6b-a3f0-5fabcb6efcd4 req-46187cd9-9133-44e1-a58b-5a10a6430eab service nova] Lock "4623565b-cd36-498c-a0e9-c3b1c6ef479b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.998057] env[61852]: DEBUG nova.compute.manager [req-2ed9f504-b471-4b6b-a3f0-5fabcb6efcd4 req-46187cd9-9133-44e1-a58b-5a10a6430eab service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] No waiting events found dispatching network-vif-plugged-6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 911.998057] env[61852]: WARNING nova.compute.manager [req-2ed9f504-b471-4b6b-a3f0-5fabcb6efcd4 req-46187cd9-9133-44e1-a58b-5a10a6430eab service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Received unexpected event network-vif-plugged-6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883 for instance with vm_state building and task_state spawning. 
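The records above show the external-event race the WARNING documents: Neutron reported network-vif-plugged before any code path had registered a waiter for it, so pop_instance_event found nothing to dispatch to. A minimal sketch of that pattern, using stdlib threading rather than Nova's actual InstanceEvents implementation (all names here are illustrative stand-ins):

```python
# Illustrative sketch only -- not Nova's code. It mimics the pattern behind
# "No waiting events found dispatching network-vif-plugged-..." above: the
# compute manager keeps a registry of events it is prepared to wait on; an
# external event that arrives before anyone registered a waiter is dispatched
# to nobody and logged as unexpected.
import threading

class InstanceEvents:
    """Minimal per-instance event registry (hypothetical names)."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(self, instance_uuid, event_name):
        """Called by the code path that will later wait (e.g. while plugging VIFs)."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def pop_instance_event(self, instance_uuid, event_name):
        """Called when the external event arrives; returns the waiter or None."""
        with self._lock:
            return self._waiters.pop((instance_uuid, event_name), None)

def external_instance_event(events, instance_uuid, event_name):
    waiter = events.pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        # Nobody was waiting yet -- the log above records this case as a
        # WARNING ("Received unexpected event ... vm_state building").
        print(f"unexpected event {event_name} for {instance_uuid}")
    else:
        waiter.set()  # wakes a thread blocked on waiter.wait(timeout)

events = InstanceEvents()
external_instance_event(events, "4623565b", "network-vif-plugged")  # unexpected
w = events.prepare_for_event("4623565b", "network-vif-plugged")
external_instance_event(events, "4623565b", "network-vif-plugged")
print("delivered:", w.is_set())
```

Because the event is harmless when it merely arrives early, the real service logs it at WARNING and continues the build rather than failing the instance.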
[ 912.178333] env[61852]: DEBUG nova.network.neutron [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Successfully updated port: 6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 912.215815] env[61852]: DEBUG oslo_concurrency.lockutils [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.298813] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293123, 'name': Rename_Task, 'duration_secs': 1.13159} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.299424] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 912.299738] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2ccb2bc0-5512-4b47-95f5-3af18c9e58b0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.306950] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 912.306950] env[61852]: value = "task-1293124" [ 912.306950] env[61852]: _type = "Task" [ 912.306950] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.313494] env[61852]: DEBUG nova.compute.utils [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 912.320692] env[61852]: DEBUG nova.compute.manager [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 912.321303] env[61852]: DEBUG nova.network.neutron [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 912.331179] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293124, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 912.373024] env[61852]: DEBUG nova.policy [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7433e5ca232a4b7293cea2ff719194e1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9433be55641842fcade88f4bc39303fd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}}
[ 912.460714] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50a26e41-4e27-4d83-a916-11f5da4614df {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 912.468135] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66decbfd-2ac4-45a8-a9dd-a0a560996901 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 912.501670] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcf35278-82c2-4905-ba2c-e6e26a36b953 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 912.509972] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97afe5d3-f1aa-4c8b-b6ef-28ba13f85f2b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 912.528148] env[61852]: DEBUG nova.compute.provider_tree [None req-0162e782-4d25-42a9-8403-6de900395479 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 912.684387] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "refresh_cache-4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 912.684387] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquired lock "refresh_cache-4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 912.684471] env[61852]: DEBUG nova.network.neutron [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
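The repeated "Inventory has not changed for provider ..." lines come from the report client comparing freshly computed inventory against what it last sent to Placement and skipping the update when they match. A minimal sketch of that comparison, with illustrative names rather than the real nova.scheduler.client.report code, using the exact inventory shape from the log:

```python
# Illustrative sketch: skip the Placement update when inventory is unchanged.
last_reported = {}  # provider_uuid -> last inventory dict we sent

def set_inventory_for_provider(provider_uuid, inventory, put_inventory):
    if last_reported.get(provider_uuid) == inventory:
        print(f"Inventory has not changed for provider {provider_uuid}")
        return
    put_inventory(provider_uuid, inventory)  # e.g. PUT .../inventories
    last_reported[provider_uuid] = inventory

inventory = {  # same shape and values as the log line above
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138,
                'step_size': 1, 'allocation_ratio': 1.0},
}
put = lambda uuid, inv: print('PUT inventory for', uuid)
set_inventory_for_provider('f818062c-7b17-4bd0-94af-192a674543c3', inventory, put)
set_inventory_for_provider('f818062c-7b17-4bd0-94af-192a674543c3', inventory, put)  # skipped
```

Comparing plain dicts is enough here because the inventory is a value object; nothing is written unless some field actually differs.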
[ 912.710062] env[61852]: DEBUG nova.network.neutron [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Successfully created port: 2d382abe-68f4-4b6f-a534-81e74a2503ef {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 912.818037] env[61852]: DEBUG oslo_vmware.api [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293124, 'name': PowerOnVM_Task, 'duration_secs': 0.475113} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 912.818808] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 912.819113] env[61852]: INFO nova.compute.manager [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Took 14.48 seconds to spawn the instance on the hypervisor.
[ 912.819397] env[61852]: DEBUG nova.compute.manager [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 912.820281] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d85a45a-d81c-4f6c-8221-1bd4bdabd90f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 912.823296] env[61852]: DEBUG nova.compute.manager [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 913.032414] env[61852]: DEBUG nova.scheduler.client.report [None req-0162e782-4d25-42a9-8403-6de900395479 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 913.248733] env[61852]: DEBUG nova.network.neutron [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Instance cache missing network info.
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 913.358043] env[61852]: INFO nova.compute.manager [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Took 30.79 seconds to build instance. [ 913.359208] env[61852]: DEBUG nova.compute.manager [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Stashing vm_state: active {{(pid=61852) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 913.537862] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0162e782-4d25-42a9-8403-6de900395479 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.731s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.542103] env[61852]: DEBUG oslo_concurrency.lockutils [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.326s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.543964] env[61852]: INFO nova.compute.claims [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 913.578908] env[61852]: INFO nova.scheduler.client.report [None req-0162e782-4d25-42a9-8403-6de900395479 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Deleted allocations for instance 4fb68588-21a8-4004-9bbc-aa1655624bcb [ 913.654832] env[61852]: DEBUG nova.network.neutron [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Updating instance_info_cache with network_info: [{"id": "6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883", "address": "fa:16:3e:b2:2d:44", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ab757ae-eb", "ovs_interfaceid": "6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.843714] env[61852]: DEBUG nova.compute.manager [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 913.866762] env[61852]: DEBUG oslo_concurrency.lockutils [None req-78cbdc0c-61c9-42ab-bc8c-64a12b7cd21a tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "6cb1968c-b951-4a83-a036-ba50b735133c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.309s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.877504] env[61852]: DEBUG nova.virt.hardware [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 913.877837] env[61852]: DEBUG nova.virt.hardware [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 913.878313] env[61852]: DEBUG nova.virt.hardware [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 913.878568] env[61852]: DEBUG nova.virt.hardware [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 913.878757] env[61852]: DEBUG nova.virt.hardware [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 913.878922] env[61852]: DEBUG nova.virt.hardware [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 
tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 913.879180] env[61852]: DEBUG nova.virt.hardware [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 913.879369] env[61852]: DEBUG nova.virt.hardware [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 913.880327] env[61852]: DEBUG nova.virt.hardware [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 913.881032] env[61852]: DEBUG nova.virt.hardware [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 913.881032] env[61852]: DEBUG nova.virt.hardware [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 913.882338] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39715ab6-dd33-4909-8651-ddfad59e38f1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.890062] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 913.894284] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9e2311e-c6ab-4f2a-9180-f882f45ce8a9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.025617] env[61852]: DEBUG nova.compute.manager [req-db88f7e5-98fe-4ef9-b330-dd94e149eb70 req-3ff550ae-2f82-4ec4-8e7f-6572e03e4973 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Received event network-changed-6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 914.025839] env[61852]: DEBUG nova.compute.manager [req-db88f7e5-98fe-4ef9-b330-dd94e149eb70 req-3ff550ae-2f82-4ec4-8e7f-6572e03e4973 service nova] 
[instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Refreshing instance network info cache due to event network-changed-6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 914.026098] env[61852]: DEBUG oslo_concurrency.lockutils [req-db88f7e5-98fe-4ef9-b330-dd94e149eb70 req-3ff550ae-2f82-4ec4-8e7f-6572e03e4973 service nova] Acquiring lock "refresh_cache-4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 914.092743] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0162e782-4d25-42a9-8403-6de900395479 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "4fb68588-21a8-4004-9bbc-aa1655624bcb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.734s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.158457] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Releasing lock "refresh_cache-4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 914.158457] env[61852]: DEBUG nova.compute.manager [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Instance network_info: |[{"id": "6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883", "address": "fa:16:3e:b2:2d:44", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ab757ae-eb", "ovs_interfaceid": "6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 914.158607] env[61852]: DEBUG oslo_concurrency.lockutils [req-db88f7e5-98fe-4ef9-b330-dd94e149eb70 req-3ff550ae-2f82-4ec4-8e7f-6572e03e4973 service nova] Acquired lock "refresh_cache-4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.159476] env[61852]: DEBUG nova.network.neutron [req-db88f7e5-98fe-4ef9-b330-dd94e149eb70 req-3ff550ae-2f82-4ec4-8e7f-6572e03e4973 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Refreshing network info cache for port 
6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 914.160887] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b2:2d:44', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 914.170373] env[61852]: DEBUG oslo.service.loopingcall [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 914.170969] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 914.171142] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-48e860b9-afd5-454f-a9e6-0172f37ce0fd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 914.194020] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 914.194020] env[61852]: value = "task-1293125"
[ 914.194020] env[61852]: _type = "Task"
[ 914.194020] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 914.203197] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293125, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
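The "Waiting for the task ... progress is N%" pairs throughout this log come from a poll loop: the client repeatedly reads the vCenter task state, logs the reported progress, and returns or raises when the task reaches a terminal state. A minimal sketch of that loop under stated assumptions (VCTask and its poll() method are invented stand-ins for a real vCenter task handle, not oslo.vmware's API):

```python
# Illustrative polling loop, mimicking the Task/progress lines above.
import time

class VCTask:
    """Fake task that finishes after a few polls (stand-in for a vCenter task)."""
    def __init__(self, name):
        self.name, self._polls = name, 0
    def poll(self):
        self._polls += 1
        progress = min(self._polls * 33, 100)
        state = 'success' if progress >= 100 else 'running'
        return state, progress

def wait_for_task(task, poll_interval=0.5, timeout=300.0):
    deadline = time.monotonic() + timeout
    while True:
        state, progress = task.poll()
        print(f"Task: {task.name} progress is {progress}%.")  # the log line
        if state == 'success':
            return
        if state == 'error':
            raise RuntimeError(f"task {task.name} failed")
        if time.monotonic() >= deadline:
            raise TimeoutError(task.name)
        time.sleep(poll_interval)

wait_for_task(VCTask('CreateVM_Task'), poll_interval=0.01)
```

This also explains why the same task id appears several times with rising percentages before the final "completed successfully" record: each poll emits one DEBUG line.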
[ 914.431398] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "6cb1968c-b951-4a83-a036-ba50b735133c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 914.431782] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "6cb1968c-b951-4a83-a036-ba50b735133c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 914.431960] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "6cb1968c-b951-4a83-a036-ba50b735133c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 914.432172] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "6cb1968c-b951-4a83-a036-ba50b735133c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 914.432360] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "6cb1968c-b951-4a83-a036-ba50b735133c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 914.434732] env[61852]: INFO nova.compute.manager [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Terminating instance
[ 914.437681] env[61852]: DEBUG nova.compute.manager [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Start destroying the instance on the hypervisor.
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 914.437950] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 914.439381] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-900d7cf3-a4c9-4374-85bc-53eb338f6161 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.447359] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 914.447632] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c47d14bb-77fd-4f83-8603-a9d8227e9c24 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.451710] env[61852]: DEBUG nova.network.neutron [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Successfully updated port: 2d382abe-68f4-4b6f-a534-81e74a2503ef {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 914.459028] env[61852]: DEBUG oslo_vmware.api [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 914.459028] env[61852]: value = "task-1293126" [ 914.459028] env[61852]: _type = "Task" [ 914.459028] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.470621] env[61852]: DEBUG oslo_vmware.api [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293126, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.706851] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293125, 'name': CreateVM_Task, 'duration_secs': 0.478299} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.709240] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 914.714021] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 914.714021] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.714021] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 914.714021] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c58c49fc-93e3-493a-884f-d47d352d1a80 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.718589] env[61852]: DEBUG oslo_vmware.api [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 914.718589] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529a74e7-3003-165c-45b4-1e57b451c178" [ 914.718589] env[61852]: _type = "Task" [ 914.718589] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.731973] env[61852]: DEBUG oslo_vmware.api [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529a74e7-3003-165c-45b4-1e57b451c178, 'name': SearchDatastore_Task, 'duration_secs': 0.009691} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 914.732741] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 914.732741] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 914.732883] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 914.733025] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 914.734084] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 914.734084] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b8735d30-c5d3-41f7-a737-5daa3946f9cf {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 914.743607] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 914.743811] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
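The _fetch_image_if_missing records above show the image-cache pattern: the driver serializes on a lock named after the cached image path, checks whether the image already sits in devstack-image-cache_base, and fetches it only when absent. A minimal sketch of that pattern under stated assumptions (local filesystem paths stand in for datastore paths, and fetch_image is a hypothetical callable, not Nova's):

```python
# Illustrative fetch-if-missing with a per-image lock.
import threading
from pathlib import Path

_cache_locks = {}
_cache_locks_guard = threading.Lock()

def _lock_for(name):
    # One lock per cache key, created on first use.
    with _cache_locks_guard:
        return _cache_locks.setdefault(name, threading.Lock())

def fetch_image_if_missing(cache_dir, image_id, fetch_image):
    cached = Path(cache_dir) / image_id / f"{image_id}.vmdk"
    with _lock_for(str(cached)):          # 'Acquiring lock "[datastore1] ..."'
        if not cached.exists():           # the SearchDatastore_Task step
            cached.parent.mkdir(parents=True, exist_ok=True)  # MakeDirectory
            fetch_image(image_id, cached)
    return cached                          # lock released on exit

# Hypothetical usage: the second call finds the file and skips the fetch.
path = fetch_image_if_missing("/tmp/image-cache",
                              "90fd8f39-16b3-43e0-a682-0ec131005e31",
                              lambda image_id, dest: dest.write_bytes(b"vmdk"))
```

Holding the lock across the exists-check and the fetch is what keeps two concurrent spawns of the same image from downloading it twice.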
[ 914.744916] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ba23223-73b0-44a6-9c54-c7ab242f00cd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 914.749683] env[61852]: DEBUG oslo_vmware.api [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){
[ 914.749683] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52c5f569-8468-3e5d-baa8-daa322964c9e"
[ 914.749683] env[61852]: _type = "Task"
[ 914.749683] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 914.756615] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a99aad9f-b5f5-4d8f-98f1-63f25dc5080f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 914.764939] env[61852]: DEBUG oslo_vmware.api [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52c5f569-8468-3e5d-baa8-daa322964c9e, 'name': SearchDatastore_Task, 'duration_secs': 0.007615} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 914.767435] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b33b1bcc-2f7f-422f-8cbc-f41cc8469a0f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 914.770697] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2160886-9298-4cf7-922e-39bae4337a2c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 914.777920] env[61852]: DEBUG oslo_vmware.api [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){
[ 914.777920] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]523c3f12-3caf-330b-d72d-d3fee161a9f1"
[ 914.777920] env[61852]: _type = "Task"
[ 914.777920] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 914.811212] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80545ac6-7c2a-44a7-a31b-e5775454f44f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 914.819660] env[61852]: DEBUG oslo_vmware.api [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]523c3f12-3caf-330b-d72d-d3fee161a9f1, 'name': SearchDatastore_Task, 'duration_secs': 0.008839} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
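With the cached VMDK located, the records that follow copy it into the instance directory (CopyVirtualDisk_Task) and, a little later, extend the root disk to the flavor's root_gb (the ExtendVirtualDisk_Task extends to 1048576 KB, i.e. 1 GiB, matching m1.nano's root_gb=1). A local-filesystem analogy of that copy-then-grow step, illustrative only since the real operations are vCenter VirtualDiskManager calls:

```python
# Illustrative analogy for CopyVirtualDisk_Task + ExtendVirtualDisk_Task.
import shutil
from pathlib import Path

def clone_root_disk(cached_vmdk, instance_uuid, datastore_root, root_kb):
    inst_dir = Path(datastore_root) / instance_uuid
    inst_dir.mkdir(parents=True, exist_ok=True)
    root = inst_dir / f"{instance_uuid}.vmdk"
    shutil.copyfile(cached_vmdk, root)     # CopyVirtualDisk_Task equivalent
    size = root_kb * 1024                  # 1048576 KB -> 1 GiB in bytes
    if root.stat().st_size < size:         # ExtendVirtualDisk_Task equivalent
        with open(root, 'r+b') as f:
            f.truncate(size)               # sparse-extend the backing file
    return root
```

The copy happens once per instance while the cache copy is shared, which is why the cache path lock above is released before the per-instance copy starts.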
[ 914.821851] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 914.822176] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 4623565b-cd36-498c-a0e9-c3b1c6ef479b/4623565b-cd36-498c-a0e9-c3b1c6ef479b.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 914.822501] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-996a8843-0f38-4685-8703-02d5a9db3b35 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 914.825582] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ebc9429-9f30-4c30-8b02-e7f1b223fd84 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 914.841752] env[61852]: DEBUG nova.compute.provider_tree [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 914.844503] env[61852]: DEBUG oslo_vmware.api [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){
[ 914.844503] env[61852]: value = "task-1293127"
[ 914.844503] env[61852]: _type = "Task"
[ 914.844503] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 914.852562] env[61852]: DEBUG oslo_vmware.api [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293127, 'name': CopyVirtualDisk_Task} progress is 0%.
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.954548] env[61852]: DEBUG oslo_concurrency.lockutils [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Acquiring lock "refresh_cache-e97448d7-0162-44bf-95d1-93bdcbcaec25" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 914.958019] env[61852]: DEBUG oslo_concurrency.lockutils [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Acquired lock "refresh_cache-e97448d7-0162-44bf-95d1-93bdcbcaec25" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.958019] env[61852]: DEBUG nova.network.neutron [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 914.966361] env[61852]: DEBUG oslo_vmware.api [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293126, 'name': PowerOffVM_Task, 'duration_secs': 0.226185} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.966663] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 914.966873] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 914.967176] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a62fbbb7-8d01-4d0b-bb80-5c47f42fdc6f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.052569] env[61852]: DEBUG nova.network.neutron [req-db88f7e5-98fe-4ef9-b330-dd94e149eb70 req-3ff550ae-2f82-4ec4-8e7f-6572e03e4973 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Updated VIF entry in instance network info cache for port 6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883. 
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 915.053095] env[61852]: DEBUG nova.network.neutron [req-db88f7e5-98fe-4ef9-b330-dd94e149eb70 req-3ff550ae-2f82-4ec4-8e7f-6572e03e4973 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Updating instance_info_cache with network_info: [{"id": "6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883", "address": "fa:16:3e:b2:2d:44", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ab757ae-eb", "ovs_interfaceid": "6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.088033] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 915.088415] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 915.088676] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Deleting the datastore file [datastore1] 6cb1968c-b951-4a83-a036-ba50b735133c {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 915.088981] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bab0e656-006e-4202-9c10-55d16c1054d4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.097624] env[61852]: DEBUG oslo_vmware.api [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 915.097624] env[61852]: value = "task-1293129" [ 915.097624] env[61852]: _type = "Task" [ 915.097624] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.109144] env[61852]: DEBUG oslo_vmware.api [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293129, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.346085] env[61852]: DEBUG nova.scheduler.client.report [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 915.360015] env[61852]: DEBUG oslo_vmware.api [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293127, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478767} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.360257] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 4623565b-cd36-498c-a0e9-c3b1c6ef479b/4623565b-cd36-498c-a0e9-c3b1c6ef479b.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 915.364018] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 915.364018] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aec4d8eb-7c70-4862-9bef-8e7d9ca9f364 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.367376] env[61852]: DEBUG oslo_vmware.api [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 915.367376] env[61852]: value = "task-1293130" [ 915.367376] env[61852]: _type = "Task" [ 915.367376] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.377164] env[61852]: DEBUG oslo_vmware.api [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293130, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.500435] env[61852]: DEBUG nova.network.neutron [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 915.524967] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "8bdb8059-3fb5-4f9c-bc73-b85bf8a23075" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.524967] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "8bdb8059-3fb5-4f9c-bc73-b85bf8a23075" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.555830] env[61852]: DEBUG oslo_concurrency.lockutils [req-db88f7e5-98fe-4ef9-b330-dd94e149eb70 req-3ff550ae-2f82-4ec4-8e7f-6572e03e4973 service nova] Releasing lock "refresh_cache-4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 915.610671] env[61852]: DEBUG oslo_vmware.api [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293129, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.319023} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.610950] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 915.611161] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 915.611340] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 915.611538] env[61852]: INFO nova.compute.manager [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Took 1.17 seconds to destroy the instance on the hypervisor. 
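The task-1293129 records above follow oslo.vmware's standard poll-until-done shape: FileManager.DeleteDatastoreFile_Task returns a Task moref, and wait_for_task() re-reads its state each poll interval (the "_poll_task ... progress is 0%" record) until the task succeeds or errors. A minimal standalone sketch of the same calls; the vCenter host, credentials and Datacenter moref are placeholders, and only the datastore path is taken from the log:

from oslo_vmware import api, vim_util

# Placeholders: host, credentials and the Datacenter moref are illustrative.
session = api.VMwareAPISession(
    'vc.example.test', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)
dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')

# DeleteDatastoreFile_Task returns a Task managed object reference.
file_manager = session.vim.service_content.fileManager
task = session.invoke_api(
    session.vim, 'DeleteDatastoreFile_Task', file_manager,
    name='[datastore1] 6cb1968c-b951-4a83-a036-ba50b735133c',
    datacenter=dc_ref)

# Re-reads task.info on each interval (the "_poll_task ... progress" records)
# and raises if the task finishes in the 'error' state.
session.wait_for_task(task)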
[ 915.611761] env[61852]: DEBUG oslo.service.loopingcall [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 915.611953] env[61852]: DEBUG nova.compute.manager [-] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 915.612529] env[61852]: DEBUG nova.network.neutron [-] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 915.666454] env[61852]: DEBUG nova.network.neutron [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Updating instance_info_cache with network_info: [{"id": "2d382abe-68f4-4b6f-a534-81e74a2503ef", "address": "fa:16:3e:c3:98:82", "network": {"id": "24c305c0-d7b3-4add-963f-f0a579e4b61a", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-93735973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9433be55641842fcade88f4bc39303fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d382abe-68", "ovs_interfaceid": "2d382abe-68f4-4b6f-a534-81e74a2503ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.857016] env[61852]: DEBUG oslo_concurrency.lockutils [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.313s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.857016] env[61852]: DEBUG nova.compute.manager [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 915.858495] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.968s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.880063] env[61852]: DEBUG oslo_vmware.api [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293130, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.259361} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.881054] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 915.881233] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83b87f93-acfd-4376-965d-f5f046d92f32 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.905813] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] 4623565b-cd36-498c-a0e9-c3b1c6ef479b/4623565b-cd36-498c-a0e9-c3b1c6ef479b.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 915.906755] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54a94762-07c3-4fe5-aa47-69d2fcfed9f5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.932177] env[61852]: DEBUG oslo_vmware.api [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 915.932177] env[61852]: value = "task-1293131" [ 915.932177] env[61852]: _type = "Task" [ 915.932177] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.942162] env[61852]: DEBUG oslo_vmware.api [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293131, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.027789] env[61852]: DEBUG nova.compute.manager [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 916.170572] env[61852]: DEBUG oslo_concurrency.lockutils [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Releasing lock "refresh_cache-e97448d7-0162-44bf-95d1-93bdcbcaec25" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 916.171054] env[61852]: DEBUG nova.compute.manager [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Instance network_info: |[{"id": "2d382abe-68f4-4b6f-a534-81e74a2503ef", "address": "fa:16:3e:c3:98:82", "network": {"id": "24c305c0-d7b3-4add-963f-f0a579e4b61a", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-93735973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9433be55641842fcade88f4bc39303fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d382abe-68", "ovs_interfaceid": "2d382abe-68f4-4b6f-a534-81e74a2503ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 916.171388] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c3:98:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d4ef133-b6f3-41d1-add4-92a1482195cf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2d382abe-68f4-4b6f-a534-81e74a2503ef', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 916.179101] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Creating folder: Project (9433be55641842fcade88f4bc39303fd). Parent ref: group-v277280. 
{{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 916.179422] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ddcdb4c5-9777-44ca-979b-07ecfad77722 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.190677] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Created folder: Project (9433be55641842fcade88f4bc39303fd) in parent group-v277280. [ 916.190898] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Creating folder: Instances. Parent ref: group-v277384. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 916.191133] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-48563056-3c96-4372-9692-b7cb35ace8ff {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.197699] env[61852]: DEBUG nova.compute.manager [req-fff4be14-cc82-453a-aaee-d746192f3888 req-20a13cc3-2494-4eb0-8169-c0ba1329c471 service nova] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Received event network-vif-plugged-2d382abe-68f4-4b6f-a534-81e74a2503ef {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 916.197924] env[61852]: DEBUG oslo_concurrency.lockutils [req-fff4be14-cc82-453a-aaee-d746192f3888 req-20a13cc3-2494-4eb0-8169-c0ba1329c471 service nova] Acquiring lock "e97448d7-0162-44bf-95d1-93bdcbcaec25-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.198150] env[61852]: DEBUG oslo_concurrency.lockutils [req-fff4be14-cc82-453a-aaee-d746192f3888 req-20a13cc3-2494-4eb0-8169-c0ba1329c471 service nova] Lock "e97448d7-0162-44bf-95d1-93bdcbcaec25-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.198323] env[61852]: DEBUG oslo_concurrency.lockutils [req-fff4be14-cc82-453a-aaee-d746192f3888 req-20a13cc3-2494-4eb0-8169-c0ba1329c471 service nova] Lock "e97448d7-0162-44bf-95d1-93bdcbcaec25-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.198539] env[61852]: DEBUG nova.compute.manager [req-fff4be14-cc82-453a-aaee-d746192f3888 req-20a13cc3-2494-4eb0-8169-c0ba1329c471 service nova] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] No waiting events found dispatching network-vif-plugged-2d382abe-68f4-4b6f-a534-81e74a2503ef {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 916.198688] env[61852]: WARNING nova.compute.manager [req-fff4be14-cc82-453a-aaee-d746192f3888 req-20a13cc3-2494-4eb0-8169-c0ba1329c471 service nova] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Received unexpected event network-vif-plugged-2d382abe-68f4-4b6f-a534-81e74a2503ef for instance with vm_state building and task_state spawning. 
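The Acquiring/acquired/released triplet around _pop_event above, and the Acquiring/Acquired/Releasing records for the refresh_cache-* locks elsewhere in this log, are oslo.concurrency's lockutils instrumentation. A minimal sketch of both forms; the lock names are copied from the log, the bodies are stand-ins:

from oslo_concurrency import lockutils

# Decorator form: emits the 'inner' records ("Acquiring lock ... by ...",
# "acquired ... waited 0.000s", '"released" ... held 0.000s') seen
# around _pop_event.
@lockutils.synchronized('e97448d7-0162-44bf-95d1-93bdcbcaec25-events')
def _pop_event():
    pass  # stand-in for the per-instance event bookkeeping

# Context-manager form: emits the 'lock' records ("Acquiring lock ...",
# "Acquired lock ...", "Releasing lock ...") seen for the refresh_cache locks.
with lockutils.lock('refresh_cache-e97448d7-0162-44bf-95d1-93bdcbcaec25'):
    pass  # stand-in for the network-info cache refresh

_pop_event()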
[ 916.198869] env[61852]: DEBUG nova.compute.manager [req-fff4be14-cc82-453a-aaee-d746192f3888 req-20a13cc3-2494-4eb0-8169-c0ba1329c471 service nova] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Received event network-changed-2d382abe-68f4-4b6f-a534-81e74a2503ef {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 916.199108] env[61852]: DEBUG nova.compute.manager [req-fff4be14-cc82-453a-aaee-d746192f3888 req-20a13cc3-2494-4eb0-8169-c0ba1329c471 service nova] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Refreshing instance network info cache due to event network-changed-2d382abe-68f4-4b6f-a534-81e74a2503ef. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 916.200311] env[61852]: DEBUG oslo_concurrency.lockutils [req-fff4be14-cc82-453a-aaee-d746192f3888 req-20a13cc3-2494-4eb0-8169-c0ba1329c471 service nova] Acquiring lock "refresh_cache-e97448d7-0162-44bf-95d1-93bdcbcaec25" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 916.200311] env[61852]: DEBUG oslo_concurrency.lockutils [req-fff4be14-cc82-453a-aaee-d746192f3888 req-20a13cc3-2494-4eb0-8169-c0ba1329c471 service nova] Acquired lock "refresh_cache-e97448d7-0162-44bf-95d1-93bdcbcaec25" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.200311] env[61852]: DEBUG nova.network.neutron [req-fff4be14-cc82-453a-aaee-d746192f3888 req-20a13cc3-2494-4eb0-8169-c0ba1329c471 service nova] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Refreshing network info cache for port 2d382abe-68f4-4b6f-a534-81e74a2503ef {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 916.203698] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Created folder: Instances in parent group-v277384. [ 916.203926] env[61852]: DEBUG oslo.service.loopingcall [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 916.204123] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 916.204343] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c4422b17-80fa-449c-b0f6-e684516abcbe {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.221039] env[61852]: DEBUG nova.compute.manager [req-5894d62a-c3af-4109-8cb4-23866ad7c91b req-50dd1535-2f1a-4f9e-8bf5-e9e59d60482b service nova] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Received event network-vif-deleted-ad6990c8-dfec-404b-9e08-011ac672c222 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 916.221246] env[61852]: INFO nova.compute.manager [req-5894d62a-c3af-4109-8cb4-23866ad7c91b req-50dd1535-2f1a-4f9e-8bf5-e9e59d60482b service nova] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Neutron deleted interface ad6990c8-dfec-404b-9e08-011ac672c222; detaching it from the instance and deleting it from the info cache [ 916.221476] env[61852]: DEBUG nova.network.neutron [req-5894d62a-c3af-4109-8cb4-23866ad7c91b req-50dd1535-2f1a-4f9e-8bf5-e9e59d60482b service nova] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.227879] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 916.227879] env[61852]: value = "task-1293134" [ 916.227879] env[61852]: _type = "Task" [ 916.227879] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.236959] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293134, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.362600] env[61852]: DEBUG nova.compute.utils [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 916.367165] env[61852]: INFO nova.compute.claims [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 916.372939] env[61852]: DEBUG nova.compute.manager [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 916.378393] env[61852]: DEBUG nova.network.neutron [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 916.443557] env[61852]: DEBUG oslo_vmware.api [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293131, 'name': ReconfigVM_Task, 'duration_secs': 0.261597} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.445747] env[61852]: DEBUG nova.policy [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd1349b8262e345068742af657fa8cbd0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4dbb543c66364861bf5f437c8c33a550', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 916.447740] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Reconfigured VM instance instance-00000053 to attach disk [datastore1] 4623565b-cd36-498c-a0e9-c3b1c6ef479b/4623565b-cd36-498c-a0e9-c3b1c6ef479b.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 916.449084] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bae582c4-1e0d-4b6c-b8ad-9b3562033eee {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.456619] env[61852]: DEBUG oslo_vmware.api [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 916.456619] env[61852]: value = "task-1293135" [ 916.456619] env[61852]: _type = "Task" [ 916.456619] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.472172] env[61852]: DEBUG oslo_vmware.api [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293135, 'name': Rename_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.496674] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Acquiring lock "b99bacc1-21e7-4bbd-8092-549246500421" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.497348] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Lock "b99bacc1-21e7-4bbd-8092-549246500421" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.546654] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.557312] env[61852]: DEBUG nova.network.neutron [-] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.725016] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b8b59a13-fb1e-4f66-a26d-e6badabcf4a8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.737557] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7327b53d-da3d-4aaf-be50-752125ca2711 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.759405] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293134, 'name': CreateVM_Task, 'duration_secs': 0.475857} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.759999] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 916.760729] env[61852]: DEBUG oslo_concurrency.lockutils [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 916.760900] env[61852]: DEBUG oslo_concurrency.lockutils [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.764345] env[61852]: DEBUG oslo_concurrency.lockutils [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 916.764640] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e25ad990-faac-4c91-aeab-ad6478080b53 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.770201] env[61852]: DEBUG oslo_vmware.api [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Waiting for the task: (returnval){ [ 916.770201] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]524bce15-0ff0-f474-5542-f7ede8969810" [ 916.770201] env[61852]: _type = "Task" [ 916.770201] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.777714] env[61852]: DEBUG nova.compute.manager [req-5894d62a-c3af-4109-8cb4-23866ad7c91b req-50dd1535-2f1a-4f9e-8bf5-e9e59d60482b service nova] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Detach interface failed, port_id=ad6990c8-dfec-404b-9e08-011ac672c222, reason: Instance 6cb1968c-b951-4a83-a036-ba50b735133c could not be found. {{(pid=61852) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 916.785204] env[61852]: DEBUG oslo_vmware.api [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]524bce15-0ff0-f474-5542-f7ede8969810, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.787416] env[61852]: DEBUG nova.network.neutron [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Successfully created port: c7387a83-80b1-43cf-8e49-88ed66f63c70 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 916.878400] env[61852]: DEBUG nova.compute.manager [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 916.883116] env[61852]: INFO nova.compute.resource_tracker [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Updating resource usage from migration d85168bf-e137-4b9c-a79c-77442dc6a529 [ 916.965356] env[61852]: DEBUG oslo_vmware.api [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293135, 'name': Rename_Task, 'duration_secs': 0.202286} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.968079] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 916.968832] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3504920d-5fcd-44fc-a3ce-fe49ce016b16 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.975564] env[61852]: DEBUG oslo_vmware.api [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 916.975564] env[61852]: value = "task-1293136" [ 916.975564] env[61852]: _type = "Task" [ 916.975564] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.982422] env[61852]: DEBUG nova.network.neutron [req-fff4be14-cc82-453a-aaee-d746192f3888 req-20a13cc3-2494-4eb0-8169-c0ba1329c471 service nova] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Updated VIF entry in instance network info cache for port 2d382abe-68f4-4b6f-a534-81e74a2503ef. 
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 916.982835] env[61852]: DEBUG nova.network.neutron [req-fff4be14-cc82-453a-aaee-d746192f3888 req-20a13cc3-2494-4eb0-8169-c0ba1329c471 service nova] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Updating instance_info_cache with network_info: [{"id": "2d382abe-68f4-4b6f-a534-81e74a2503ef", "address": "fa:16:3e:c3:98:82", "network": {"id": "24c305c0-d7b3-4add-963f-f0a579e4b61a", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-93735973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9433be55641842fcade88f4bc39303fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d382abe-68", "ovs_interfaceid": "2d382abe-68f4-4b6f-a534-81e74a2503ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.990910] env[61852]: DEBUG oslo_vmware.api [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293136, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.000293] env[61852]: DEBUG nova.compute.manager [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 917.059651] env[61852]: INFO nova.compute.manager [-] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Took 1.45 seconds to deallocate network for instance. 
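The earlier "Waiting for function ..._deallocate_network_with_retries to return" record and the 1.45-second deallocation above bracket an oslo.service looping call: the wrapped function runs repeatedly until it signals completion. A rough standalone sketch of that pattern using FixedIntervalLoopingCall (one of several looping-call variants; the function body, retry count and interval are stand-ins, not Nova's):

from oslo_service import loopingcall

attempts = {'n': 0}

def _deallocate_with_retries():
    attempts['n'] += 1
    try:
        # stand-in for the real Neutron deallocation call
        if attempts['n'] < 3:
            raise RuntimeError('transient Neutron failure')
    except RuntimeError:
        return  # swallow the failure; run again on the next interval
    raise loopingcall.LoopingCallDone()  # success: stop the loop

timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
timer.start(interval=1.0).wait()  # blocks until LoopingCallDone is raised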
[ 917.077361] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2250a0d-92de-4e40-905d-81f20bd57b18 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.085507] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-650c0545-5fc6-4c1c-9de0-3c5c5f9dc555 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.118392] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c70365ad-be04-4d4f-a122-7ea8e924b962 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.126757] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e953040a-6769-479b-8ff5-e9333762f508 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.144548] env[61852]: DEBUG nova.compute.provider_tree [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 917.287772] env[61852]: DEBUG oslo_vmware.api [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]524bce15-0ff0-f474-5542-f7ede8969810, 'name': SearchDatastore_Task, 'duration_secs': 0.019481} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.288146] env[61852]: DEBUG oslo_concurrency.lockutils [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 917.288351] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 917.288593] env[61852]: DEBUG oslo_concurrency.lockutils [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 917.288746] env[61852]: DEBUG oslo_concurrency.lockutils [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.288946] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 917.290915] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-272b731d-7169-4d9a-afeb-82f201ddcf08 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.303794] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 917.304020] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 917.304869] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-174cc731-30dc-48f1-907c-1e4e534361fa {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.310833] env[61852]: DEBUG oslo_vmware.api [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Waiting for the task: (returnval){ [ 917.310833] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d6c377-5b5b-97ff-3c14-27b2043484bb" [ 917.310833] env[61852]: _type = "Task" [ 917.310833] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.319234] env[61852]: DEBUG oslo_vmware.api [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d6c377-5b5b-97ff-3c14-27b2043484bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.486178] env[61852]: DEBUG oslo_concurrency.lockutils [req-fff4be14-cc82-453a-aaee-d746192f3888 req-20a13cc3-2494-4eb0-8169-c0ba1329c471 service nova] Releasing lock "refresh_cache-e97448d7-0162-44bf-95d1-93bdcbcaec25" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 917.486615] env[61852]: DEBUG oslo_vmware.api [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293136, 'name': PowerOnVM_Task} progress is 98%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.519300] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.568414] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.648120] env[61852]: DEBUG nova.scheduler.client.report [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 917.820928] env[61852]: DEBUG oslo_vmware.api [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d6c377-5b5b-97ff-3c14-27b2043484bb, 'name': SearchDatastore_Task, 'duration_secs': 0.008526} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.821743] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-762298b3-6952-4bd0-bbf7-85e96265312c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.827277] env[61852]: DEBUG oslo_vmware.api [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Waiting for the task: (returnval){ [ 917.827277] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]520fd68d-128d-ada1-8546-b361b71951a8" [ 917.827277] env[61852]: _type = "Task" [ 917.827277] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.834683] env[61852]: DEBUG oslo_vmware.api [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]520fd68d-128d-ada1-8546-b361b71951a8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.894334] env[61852]: DEBUG nova.compute.manager [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 917.914491] env[61852]: DEBUG nova.virt.hardware [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 917.914742] env[61852]: DEBUG nova.virt.hardware [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 917.914904] env[61852]: DEBUG nova.virt.hardware [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 917.915138] env[61852]: DEBUG nova.virt.hardware [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 917.915298] env[61852]: DEBUG nova.virt.hardware [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 917.915452] env[61852]: DEBUG nova.virt.hardware [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 917.915663] env[61852]: DEBUG nova.virt.hardware [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 917.915829] env[61852]: DEBUG nova.virt.hardware [None req-403f88e9-6223-4d6f-a763-1f714d7559ac 
tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 917.916013] env[61852]: DEBUG nova.virt.hardware [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 917.916316] env[61852]: DEBUG nova.virt.hardware [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 917.916510] env[61852]: DEBUG nova.virt.hardware [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 917.917379] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb353ab-365d-4248-a787-384e5ccf4c25 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.925657] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86362e71-c15e-41e6-a9db-60f64d49d0f7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.985848] env[61852]: DEBUG oslo_vmware.api [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293136, 'name': PowerOnVM_Task, 'duration_secs': 0.796968} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.986232] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 917.986442] env[61852]: INFO nova.compute.manager [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Took 6.40 seconds to spawn the instance on the hypervisor. 
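The nova.virt.hardware records above show the CPU-topology search for the 1-vCPU m1.nano flavor: with no flavor or image limits, every (sockets, cores, threads) factorization of the vCPU count is a candidate, and for vcpus=1 only 1:1:1 exists, hence "Got 1 possible topologies". An editor's illustration of that enumeration (not Nova's actual implementation):

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Yield (sockets, cores, threads) triples whose product is vcpus."""
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        remaining = vcpus // sockets
        for cores in range(1, min(remaining, max_cores) + 1):
            if remaining % cores:
                continue
            threads = remaining // cores
            if threads <= max_threads:
                yield (sockets, cores, threads)

# A 1-vCPU flavor admits exactly one topology, matching the log:
print(list(possible_topologies(1)))  # [(1, 1, 1)]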
[ 917.986627] env[61852]: DEBUG nova.compute.manager [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 917.987398] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69a20182-b228-4333-9291-e37df15c7a6b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.153216] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.295s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.153457] env[61852]: INFO nova.compute.manager [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Migrating [ 918.153895] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 918.153895] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired lock "compute-rpcapi-router" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.155267] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.609s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.158745] env[61852]: INFO nova.compute.claims [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 918.338753] env[61852]: DEBUG oslo_vmware.api [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]520fd68d-128d-ada1-8546-b361b71951a8, 'name': SearchDatastore_Task, 'duration_secs': 0.012946} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.339211] env[61852]: DEBUG oslo_concurrency.lockutils [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 918.339424] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] e97448d7-0162-44bf-95d1-93bdcbcaec25/e97448d7-0162-44bf-95d1-93bdcbcaec25.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 918.339827] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ab3a0599-7a23-4639-a098-fa07e1b8b04b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.346674] env[61852]: DEBUG oslo_vmware.api [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Waiting for the task: (returnval){ [ 918.346674] env[61852]: value = "task-1293137" [ 918.346674] env[61852]: _type = "Task" [ 918.346674] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.355520] env[61852]: DEBUG oslo_vmware.api [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Task: {'id': task-1293137, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.510851] env[61852]: INFO nova.compute.manager [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Took 26.68 seconds to build instance. 
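The records around this point trace the driver's image-cache path for instance e97448d7: a SearchDatastore_Task confirms the cached base vmdk exists, CopyVirtualDisk_Task (task-1293137, completed below) clones it into the instance directory, and ExtendVirtualDisk_Task then grows the root disk to 1048576 KB, the 1 GiB root of the flavor. A condensed sketch of those two disk calls through oslo.vmware; the session setup and Datacenter moref are placeholders as before, while the datastore paths and capacity come from the log:

from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vc.example.test', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)
dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')  # placeholder

disk_mgr = session.vim.service_content.virtualDiskManager
base = ('[datastore1] devstack-image-cache_base/'
        '90fd8f39-16b3-43e0-a682-0ec131005e31/'
        '90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk')
dest = ('[datastore1] e97448d7-0162-44bf-95d1-93bdcbcaec25/'
        'e97448d7-0162-44bf-95d1-93bdcbcaec25.vmdk')

# Clone the cached base image into the instance directory, then extend
# the copy to the flavor's root size; both calls return Tasks to poll.
copy_task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', disk_mgr,
    sourceName=base, sourceDatacenter=dc_ref,
    destName=dest, destDatacenter=dc_ref)
session.wait_for_task(copy_task)

extend_task = session.invoke_api(
    session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
    name=dest, datacenter=dc_ref,
    newCapacityKb=1048576, eagerZero=False)  # 1 GiB, per the log
session.wait_for_task(extend_task)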
[ 918.663612] env[61852]: INFO nova.compute.rpcapi [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Automatically selected compute RPC version 6.3 from minimum service version 67 [ 918.664272] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Releasing lock "compute-rpcapi-router" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 918.689664] env[61852]: DEBUG nova.compute.manager [req-4ed779c7-99d8-4427-819e-876f471b883d req-d60d8606-ce75-45d2-bc8a-fccb638c77bd service nova] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Received event network-vif-plugged-c7387a83-80b1-43cf-8e49-88ed66f63c70 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 918.689910] env[61852]: DEBUG oslo_concurrency.lockutils [req-4ed779c7-99d8-4427-819e-876f471b883d req-d60d8606-ce75-45d2-bc8a-fccb638c77bd service nova] Acquiring lock "cc5e0467-2960-43a1-bd7b-a528d5788028-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.690184] env[61852]: DEBUG oslo_concurrency.lockutils [req-4ed779c7-99d8-4427-819e-876f471b883d req-d60d8606-ce75-45d2-bc8a-fccb638c77bd service nova] Lock "cc5e0467-2960-43a1-bd7b-a528d5788028-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.690362] env[61852]: DEBUG oslo_concurrency.lockutils [req-4ed779c7-99d8-4427-819e-876f471b883d req-d60d8606-ce75-45d2-bc8a-fccb638c77bd service nova] Lock "cc5e0467-2960-43a1-bd7b-a528d5788028-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.690534] env[61852]: DEBUG nova.compute.manager [req-4ed779c7-99d8-4427-819e-876f471b883d req-d60d8606-ce75-45d2-bc8a-fccb638c77bd service nova] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] No waiting events found dispatching network-vif-plugged-c7387a83-80b1-43cf-8e49-88ed66f63c70 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 918.690706] env[61852]: WARNING nova.compute.manager [req-4ed779c7-99d8-4427-819e-876f471b883d req-d60d8606-ce75-45d2-bc8a-fccb638c77bd service nova] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Received unexpected event network-vif-plugged-c7387a83-80b1-43cf-8e49-88ed66f63c70 for instance with vm_state building and task_state spawning. [ 918.846029] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b41f0858-81b4-43f6-ab45-862662992568 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.858980] env[61852]: DEBUG oslo_vmware.api [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Task: {'id': task-1293137, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.490854} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.860043] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ed06b0-ec78-490a-a374-a91a0c95ebb6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.863121] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] e97448d7-0162-44bf-95d1-93bdcbcaec25/e97448d7-0162-44bf-95d1-93bdcbcaec25.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 918.863307] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 918.863552] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d87a4bc6-4b80-4619-b481-d09a6eda9719 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.918029] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8578ac0d-851e-4763-bc75-b0b647132283 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.921998] env[61852]: DEBUG oslo_vmware.api [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Waiting for the task: (returnval){ [ 918.921998] env[61852]: value = "task-1293138" [ 918.921998] env[61852]: _type = "Task" [ 918.921998] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.931745] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1acdc9cb-7097-47a9-971b-2b31de6c871e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.942368] env[61852]: DEBUG oslo_vmware.api [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Task: {'id': task-1293138, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.05908} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.943210] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 918.944399] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa79ab66-1551-4527-9225-355b5870d4ab {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.955578] env[61852]: DEBUG nova.compute.provider_tree [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 918.976639] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] e97448d7-0162-44bf-95d1-93bdcbcaec25/e97448d7-0162-44bf-95d1-93bdcbcaec25.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 918.976772] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-19f10e08-4151-44a3-8bb1-e00445ac0b81 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.995904] env[61852]: DEBUG oslo_vmware.api [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Waiting for the task: (returnval){ [ 918.995904] env[61852]: value = "task-1293139" [ 918.995904] env[61852]: _type = "Task" [ 918.995904] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.003858] env[61852]: DEBUG oslo_vmware.api [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Task: {'id': task-1293139, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.013891] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2620e8a5-c831-41a4-928a-ed075443fd30 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "4623565b-cd36-498c-a0e9-c3b1c6ef479b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.195s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.141902] env[61852]: DEBUG nova.network.neutron [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Successfully updated port: c7387a83-80b1-43cf-8e49-88ed66f63c70 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 919.175742] env[61852]: DEBUG nova.compute.manager [req-8fe6da92-52e2-4149-a225-b308f10b425c req-64225f7f-0e74-431a-b89b-9eda9f620eb0 service nova] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Received event network-changed-c7387a83-80b1-43cf-8e49-88ed66f63c70 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 919.175999] env[61852]: DEBUG nova.compute.manager [req-8fe6da92-52e2-4149-a225-b308f10b425c req-64225f7f-0e74-431a-b89b-9eda9f620eb0 service nova] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Refreshing instance network info cache due to event network-changed-c7387a83-80b1-43cf-8e49-88ed66f63c70. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 919.176293] env[61852]: DEBUG oslo_concurrency.lockutils [req-8fe6da92-52e2-4149-a225-b308f10b425c req-64225f7f-0e74-431a-b89b-9eda9f620eb0 service nova] Acquiring lock "refresh_cache-cc5e0467-2960-43a1-bd7b-a528d5788028" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 919.176463] env[61852]: DEBUG oslo_concurrency.lockutils [req-8fe6da92-52e2-4149-a225-b308f10b425c req-64225f7f-0e74-431a-b89b-9eda9f620eb0 service nova] Acquired lock "refresh_cache-cc5e0467-2960-43a1-bd7b-a528d5788028" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.176657] env[61852]: DEBUG nova.network.neutron [req-8fe6da92-52e2-4149-a225-b308f10b425c req-64225f7f-0e74-431a-b89b-9eda9f620eb0 service nova] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Refreshing network info cache for port c7387a83-80b1-43cf-8e49-88ed66f63c70 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 919.189317] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "refresh_cache-51ecc9c3-a3fc-4bd7-8c90-003451700212" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 919.189543] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired lock "refresh_cache-51ecc9c3-a3fc-4bd7-8c90-003451700212" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.189668] env[61852]: DEBUG nova.network.neutron [None 
req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 919.458748] env[61852]: DEBUG nova.scheduler.client.report [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 919.507157] env[61852]: DEBUG oslo_vmware.api [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Task: {'id': task-1293139, 'name': ReconfigVM_Task, 'duration_secs': 0.279479} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.507157] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Reconfigured VM instance instance-00000054 to attach disk [datastore1] e97448d7-0162-44bf-95d1-93bdcbcaec25/e97448d7-0162-44bf-95d1-93bdcbcaec25.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 919.507638] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c3e19138-63f7-4c2d-8a8c-0ca4f3351ee6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.516715] env[61852]: DEBUG oslo_vmware.api [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Waiting for the task: (returnval){ [ 919.516715] env[61852]: value = "task-1293140" [ 919.516715] env[61852]: _type = "Task" [ 919.516715] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.524399] env[61852]: DEBUG oslo_vmware.api [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Task: {'id': task-1293140, 'name': Rename_Task} progress is 5%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.642512] env[61852]: DEBUG nova.compute.manager [req-c3515743-0812-4fc4-bff7-cb0a9bd17048 req-7a091d3c-bb00-4e47-ad50-d9ae1df2d723 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Received event network-changed-6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 919.642598] env[61852]: DEBUG nova.compute.manager [req-c3515743-0812-4fc4-bff7-cb0a9bd17048 req-7a091d3c-bb00-4e47-ad50-d9ae1df2d723 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Refreshing instance network info cache due to event network-changed-6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 919.643282] env[61852]: DEBUG oslo_concurrency.lockutils [req-c3515743-0812-4fc4-bff7-cb0a9bd17048 req-7a091d3c-bb00-4e47-ad50-d9ae1df2d723 service nova] Acquiring lock "refresh_cache-4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 919.643282] env[61852]: DEBUG oslo_concurrency.lockutils [req-c3515743-0812-4fc4-bff7-cb0a9bd17048 req-7a091d3c-bb00-4e47-ad50-d9ae1df2d723 service nova] Acquired lock "refresh_cache-4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.643282] env[61852]: DEBUG nova.network.neutron [req-c3515743-0812-4fc4-bff7-cb0a9bd17048 req-7a091d3c-bb00-4e47-ad50-d9ae1df2d723 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Refreshing network info cache for port 6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 919.646636] env[61852]: DEBUG oslo_concurrency.lockutils [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "refresh_cache-cc5e0467-2960-43a1-bd7b-a528d5788028" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 919.716345] env[61852]: DEBUG nova.network.neutron [req-8fe6da92-52e2-4149-a225-b308f10b425c req-64225f7f-0e74-431a-b89b-9eda9f620eb0 service nova] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 919.891524] env[61852]: DEBUG nova.network.neutron [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Updating instance_info_cache with network_info: [{"id": "61e94b93-d030-4c70-8ffc-ce81cbf29d01", "address": "fa:16:3e:23:a2:0c", "network": {"id": "37c975fc-d484-4e07-82b4-dc10db3dab61", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2132613748-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14a017ea2b084ae0ad2994dda7809c7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61e94b93-d0", "ovs_interfaceid": "61e94b93-d030-4c70-8ffc-ce81cbf29d01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.902084] env[61852]: DEBUG nova.network.neutron [req-8fe6da92-52e2-4149-a225-b308f10b425c req-64225f7f-0e74-431a-b89b-9eda9f620eb0 service nova] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.966112] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.808s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.966112] env[61852]: DEBUG nova.compute.manager [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 919.967754] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.449s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.969465] env[61852]: INFO nova.compute.claims [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 920.027387] env[61852]: DEBUG oslo_vmware.api [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Task: {'id': task-1293140, 'name': Rename_Task} progress is 99%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.396618] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Releasing lock "refresh_cache-51ecc9c3-a3fc-4bd7-8c90-003451700212" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 920.404829] env[61852]: DEBUG oslo_concurrency.lockutils [req-8fe6da92-52e2-4149-a225-b308f10b425c req-64225f7f-0e74-431a-b89b-9eda9f620eb0 service nova] Releasing lock "refresh_cache-cc5e0467-2960-43a1-bd7b-a528d5788028" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 920.405258] env[61852]: DEBUG oslo_concurrency.lockutils [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquired lock "refresh_cache-cc5e0467-2960-43a1-bd7b-a528d5788028" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.405447] env[61852]: DEBUG nova.network.neutron [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 920.444399] env[61852]: DEBUG nova.network.neutron [req-c3515743-0812-4fc4-bff7-cb0a9bd17048 req-7a091d3c-bb00-4e47-ad50-d9ae1df2d723 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Updated VIF entry in instance network info cache for port 6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883. 
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 920.445122] env[61852]: DEBUG nova.network.neutron [req-c3515743-0812-4fc4-bff7-cb0a9bd17048 req-7a091d3c-bb00-4e47-ad50-d9ae1df2d723 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Updating instance_info_cache with network_info: [{"id": "6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883", "address": "fa:16:3e:b2:2d:44", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ab757ae-eb", "ovs_interfaceid": "6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.474025] env[61852]: DEBUG nova.compute.utils [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 920.478711] env[61852]: DEBUG nova.compute.manager [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 920.478711] env[61852]: DEBUG nova.network.neutron [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 920.516606] env[61852]: DEBUG nova.policy [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'be922c40dddf48c8ae436d0a244e7b6b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bdac3605118e44a69d44ab56cafe2e21', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 920.527590] env[61852]: DEBUG oslo_vmware.api [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Task: {'id': task-1293140, 'name': Rename_Task} progress is 99%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.681479] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "d58958f2-7b6f-4480-9710-aa9e67ebd37c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.681479] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "d58958f2-7b6f-4480-9710-aa9e67ebd37c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.943271] env[61852]: DEBUG nova.network.neutron [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 920.947495] env[61852]: DEBUG oslo_concurrency.lockutils [req-c3515743-0812-4fc4-bff7-cb0a9bd17048 req-7a091d3c-bb00-4e47-ad50-d9ae1df2d723 service nova] Releasing lock "refresh_cache-4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 920.981025] env[61852]: DEBUG nova.compute.manager [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 921.032771] env[61852]: DEBUG oslo_vmware.api [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Task: {'id': task-1293140, 'name': Rename_Task, 'duration_secs': 1.477641} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.033076] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 921.033324] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-236be4ef-67a7-4536-91de-917b81d05036 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.040598] env[61852]: DEBUG oslo_vmware.api [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Waiting for the task: (returnval){ [ 921.040598] env[61852]: value = "task-1293141" [ 921.040598] env[61852]: _type = "Task" [ 921.040598] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.049667] env[61852]: DEBUG oslo_vmware.api [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Task: {'id': task-1293141, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.101513] env[61852]: DEBUG nova.network.neutron [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Updating instance_info_cache with network_info: [{"id": "c7387a83-80b1-43cf-8e49-88ed66f63c70", "address": "fa:16:3e:72:80:57", "network": {"id": "66e1ee36-559a-4219-ab11-b6c5d9aeb20e", "bridge": "br-int", "label": "tempest-ServersTestJSON-206536995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4dbb543c66364861bf5f437c8c33a550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2c424c9-6446-4b2a-af8c-4d9c29117c39", "external-id": "nsx-vlan-transportzone-437", "segmentation_id": 437, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7387a83-80", "ovs_interfaceid": "c7387a83-80b1-43cf-8e49-88ed66f63c70", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.143435] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-568d35a2-b85d-4828-b359-7b7ec15d3331 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.151293] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-202ed6a6-c5ad-42ba-9c93-80f27a40275c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.182724] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3524c8de-30d1-4ab7-ae24-8e916a53e24e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.186607] env[61852]: DEBUG nova.compute.manager [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 921.196885] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-091b7d28-aa30-4148-b54a-c03588817b22 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.213178] env[61852]: DEBUG nova.compute.provider_tree [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 921.222726] env[61852]: DEBUG nova.network.neutron [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Successfully created port: 3e0cb15e-f2d1-47c8-975c-dd685e0ad664 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 921.550499] env[61852]: DEBUG oslo_vmware.api [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Task: {'id': task-1293141, 'name': PowerOnVM_Task, 'duration_secs': 0.50318} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.550846] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 921.551196] env[61852]: INFO nova.compute.manager [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Took 7.71 seconds to spawn the instance on the hypervisor. 
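Interleaved with the task polling, the lockutils entries record how long each critical section waited for and held locks such as "compute_resources" (e.g. "acquired ... waited 1.609s" and ""released" ... held 2.295s" above). A minimal sketch of that bookkeeping, assuming a process-local lock registry — timed_lock is a hypothetical name, not oslo.concurrency's API:

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}
    _guard = threading.Lock()

    @contextmanager
    def timed_lock(name, owner):
        # Look up (or create) the named lock, then time both phases.
        with _guard:
            lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        print('Lock "%s" acquired by "%s" :: waited %.3fs'
              % (name, owner, time.monotonic() - t0))
        t1 = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - t1
            lock.release()
            print('Lock "%s" "released" by "%s" :: held %.3fs'
                  % (name, owner, held))

    # usage, mirroring the resource-claim entries in this section:
    # with timed_lock('compute_resources', 'ResourceTracker.instance_claim'):
    #     ...  # claim CPU/RAM/disk on the node

The waited/held split is what makes entries like the 25.153s build lock above readable: waited measures contention, held measures the critical section itself.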
[ 921.551523] env[61852]: DEBUG nova.compute.manager [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 921.552470] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19908dcd-f693-41dc-84e5-31285a60c9e0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.604134] env[61852]: DEBUG oslo_concurrency.lockutils [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Releasing lock "refresh_cache-cc5e0467-2960-43a1-bd7b-a528d5788028" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 921.604530] env[61852]: DEBUG nova.compute.manager [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Instance network_info: |[{"id": "c7387a83-80b1-43cf-8e49-88ed66f63c70", "address": "fa:16:3e:72:80:57", "network": {"id": "66e1ee36-559a-4219-ab11-b6c5d9aeb20e", "bridge": "br-int", "label": "tempest-ServersTestJSON-206536995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4dbb543c66364861bf5f437c8c33a550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2c424c9-6446-4b2a-af8c-4d9c29117c39", "external-id": "nsx-vlan-transportzone-437", "segmentation_id": 437, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7387a83-80", "ovs_interfaceid": "c7387a83-80b1-43cf-8e49-88ed66f63c70", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 921.605142] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:72:80:57', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f2c424c9-6446-4b2a-af8c-4d9c29117c39', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c7387a83-80b1-43cf-8e49-88ed66f63c70', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 921.614803] env[61852]: DEBUG oslo.service.loopingcall [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 921.615348] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 921.615596] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dada44f5-ed09-455a-9e7a-94015fa24774 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.635826] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 921.635826] env[61852]: value = "task-1293142" [ 921.635826] env[61852]: _type = "Task" [ 921.635826] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.646635] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293142, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.708190] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.716876] env[61852]: DEBUG nova.scheduler.client.report [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 921.910920] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbebdce0-43a9-4d73-9bf0-88bd95050872 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.929715] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Updating instance '51ecc9c3-a3fc-4bd7-8c90-003451700212' progress to 0 {{(pid=61852) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 921.993767] env[61852]: DEBUG nova.compute.manager [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 922.020971] env[61852]: DEBUG nova.virt.hardware [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 922.021213] env[61852]: DEBUG nova.virt.hardware [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 922.021400] env[61852]: DEBUG nova.virt.hardware [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 922.021900] env[61852]: DEBUG nova.virt.hardware [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 922.021900] env[61852]: DEBUG nova.virt.hardware [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 922.021900] env[61852]: DEBUG nova.virt.hardware [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 922.022134] env[61852]: DEBUG nova.virt.hardware [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 922.022318] env[61852]: DEBUG nova.virt.hardware [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 922.022494] env[61852]: DEBUG nova.virt.hardware [None 
req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 922.022664] env[61852]: DEBUG nova.virt.hardware [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 922.022841] env[61852]: DEBUG nova.virt.hardware [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 922.023762] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b4838b8-414b-4e32-91cf-11be9cc816d1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.031377] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f67f8a87-e1e3-4033-ba5e-b78c21c2821f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.068845] env[61852]: INFO nova.compute.manager [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Took 23.64 seconds to build instance. [ 922.145682] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293142, 'name': CreateVM_Task, 'duration_secs': 0.373563} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.145861] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 922.146663] env[61852]: DEBUG oslo_concurrency.lockutils [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 922.146825] env[61852]: DEBUG oslo_concurrency.lockutils [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.147379] env[61852]: DEBUG oslo_concurrency.lockutils [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 922.147888] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec34d7df-7dad-4e80-bf8f-998e406c67a1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.151938] env[61852]: DEBUG oslo_vmware.api [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 922.151938] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5290a2fe-480b-aa36-68a8-100e2faf0d20" [ 922.151938] env[61852]: _type = "Task" [ 922.151938] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.158976] env[61852]: DEBUG oslo_vmware.api [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5290a2fe-480b-aa36-68a8-100e2faf0d20, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.221516] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.254s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.222108] env[61852]: DEBUG nova.compute.manager [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 922.224884] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.657s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.225173] env[61852]: DEBUG nova.objects.instance [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lazy-loading 'resources' on Instance uuid 6cb1968c-b951-4a83-a036-ba50b735133c {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 922.437489] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 922.437814] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-36885976-18d8-4acf-863d-2323100b6e89 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.444338] env[61852]: DEBUG oslo_vmware.api [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 922.444338] env[61852]: value = "task-1293143" [ 922.444338] env[61852]: _type = "Task" [ 922.444338] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.452588] env[61852]: DEBUG oslo_vmware.api [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293143, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.572069] env[61852]: DEBUG oslo_concurrency.lockutils [None req-733e7099-e149-4e98-85f8-6f7113854bc4 tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Lock "e97448d7-0162-44bf-95d1-93bdcbcaec25" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.153s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.661877] env[61852]: DEBUG oslo_vmware.api [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5290a2fe-480b-aa36-68a8-100e2faf0d20, 'name': SearchDatastore_Task, 'duration_secs': 0.012896} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.662291] env[61852]: DEBUG oslo_concurrency.lockutils [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 922.662532] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 922.662774] env[61852]: DEBUG oslo_concurrency.lockutils [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 922.662926] env[61852]: DEBUG oslo_concurrency.lockutils [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.663163] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 922.663444] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8d5806cd-2569-4aa0-afc0-4df82904da49 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.671067] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 922.671354] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 922.672057] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bdaa5e25-e40d-4297-a8d1-54b4d9dfb949 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.676775] env[61852]: DEBUG oslo_vmware.api [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 922.676775] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]521e6722-826e-7a2c-46bb-62420e1aa13e" [ 922.676775] env[61852]: _type = "Task" [ 922.676775] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.684076] env[61852]: DEBUG oslo_vmware.api [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]521e6722-826e-7a2c-46bb-62420e1aa13e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.728115] env[61852]: DEBUG nova.compute.utils [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 922.736913] env[61852]: DEBUG nova.compute.manager [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 922.738330] env[61852]: DEBUG nova.network.neutron [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 922.772164] env[61852]: DEBUG nova.compute.manager [req-41d2e321-5eaf-4f8e-82ec-ceab22656025 req-c1a5ced6-944f-4b84-b935-99e158d3c093 service nova] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Received event network-vif-plugged-3e0cb15e-f2d1-47c8-975c-dd685e0ad664 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 922.772392] env[61852]: DEBUG oslo_concurrency.lockutils [req-41d2e321-5eaf-4f8e-82ec-ceab22656025 req-c1a5ced6-944f-4b84-b935-99e158d3c093 service nova] Acquiring lock "8bdb8059-3fb5-4f9c-bc73-b85bf8a23075-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.772618] env[61852]: DEBUG oslo_concurrency.lockutils [req-41d2e321-5eaf-4f8e-82ec-ceab22656025 req-c1a5ced6-944f-4b84-b935-99e158d3c093 service nova] Lock "8bdb8059-3fb5-4f9c-bc73-b85bf8a23075-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.772786] env[61852]: DEBUG oslo_concurrency.lockutils [req-41d2e321-5eaf-4f8e-82ec-ceab22656025 req-c1a5ced6-944f-4b84-b935-99e158d3c093 service nova] Lock "8bdb8059-3fb5-4f9c-bc73-b85bf8a23075-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.772960] env[61852]: DEBUG nova.compute.manager [req-41d2e321-5eaf-4f8e-82ec-ceab22656025 req-c1a5ced6-944f-4b84-b935-99e158d3c093 service nova] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] No waiting events found dispatching network-vif-plugged-3e0cb15e-f2d1-47c8-975c-dd685e0ad664 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 922.774183] env[61852]: WARNING nova.compute.manager [req-41d2e321-5eaf-4f8e-82ec-ceab22656025 req-c1a5ced6-944f-4b84-b935-99e158d3c093 service nova] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Received unexpected event network-vif-plugged-3e0cb15e-f2d1-47c8-975c-dd685e0ad664 for instance with vm_state building and task_state spawning. 
[ 922.788911] env[61852]: DEBUG nova.policy [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '15c8c2294b2e4bf29152db8367b88831', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '558cacb83ff34e9db608ade12bd52a5d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 922.899244] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f8df16e-32e4-48b4-b856-39905d74b8ab {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.906859] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caf505e9-482d-41b5-94ae-94506cb04346 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.941132] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf1014d-bdae-488c-bb39-1ff371fefc0e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.952204] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c7a3e3f-70ed-4379-aed9-f4d51d0ecd15 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.959246] env[61852]: DEBUG oslo_vmware.api [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293143, 'name': PowerOffVM_Task, 'duration_secs': 0.204236} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.959849] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 922.960088] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Updating instance '51ecc9c3-a3fc-4bd7-8c90-003451700212' progress to 17 {{(pid=61852) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 922.972519] env[61852]: DEBUG nova.compute.provider_tree [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 922.991329] env[61852]: DEBUG nova.network.neutron [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Successfully updated port: 3e0cb15e-f2d1-47c8-975c-dd685e0ad664 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 922.994324] env[61852]: DEBUG oslo_concurrency.lockutils [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Acquiring lock "e97448d7-0162-44bf-95d1-93bdcbcaec25" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.994573] env[61852]: DEBUG oslo_concurrency.lockutils [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Lock "e97448d7-0162-44bf-95d1-93bdcbcaec25" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.994827] env[61852]: DEBUG oslo_concurrency.lockutils [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Acquiring lock "e97448d7-0162-44bf-95d1-93bdcbcaec25-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.994948] env[61852]: DEBUG oslo_concurrency.lockutils [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Lock "e97448d7-0162-44bf-95d1-93bdcbcaec25-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.995911] 
env[61852]: DEBUG oslo_concurrency.lockutils [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Lock "e97448d7-0162-44bf-95d1-93bdcbcaec25-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.998976] env[61852]: INFO nova.compute.manager [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Terminating instance [ 922.998976] env[61852]: DEBUG nova.compute.manager [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 922.998976] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 922.999901] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83741f17-5f15-4f79-bb1a-b31a568278b7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.007934] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 923.008240] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e1e7545f-1b42-46c4-8ff7-6eadd348ce3d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.015063] env[61852]: DEBUG oslo_vmware.api [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Waiting for the task: (returnval){ [ 923.015063] env[61852]: value = "task-1293144" [ 923.015063] env[61852]: _type = "Task" [ 923.015063] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.024524] env[61852]: DEBUG oslo_vmware.api [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Task: {'id': task-1293144, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.074891] env[61852]: DEBUG nova.network.neutron [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Successfully created port: 2c88ebc1-acb3-41ba-8d38-8647755ce777 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 923.187174] env[61852]: DEBUG oslo_vmware.api [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]521e6722-826e-7a2c-46bb-62420e1aa13e, 'name': SearchDatastore_Task, 'duration_secs': 0.015989} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.187998] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4345384c-bc99-44a0-9e97-9c0c044c8148 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.192974] env[61852]: DEBUG oslo_vmware.api [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 923.192974] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52f17f24-8371-e356-12ba-82f4e988f0ef" [ 923.192974] env[61852]: _type = "Task" [ 923.192974] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.200240] env[61852]: DEBUG oslo_vmware.api [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52f17f24-8371-e356-12ba-82f4e988f0ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.238018] env[61852]: DEBUG nova.compute.manager [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 923.476957] env[61852]: DEBUG nova.virt.hardware [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 923.477232] env[61852]: DEBUG nova.virt.hardware [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 923.477401] env[61852]: DEBUG nova.virt.hardware [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 923.477591] env[61852]: DEBUG nova.virt.hardware [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 923.477741] env[61852]: DEBUG nova.virt.hardware [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 923.477893] env[61852]: DEBUG nova.virt.hardware [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 923.478119] env[61852]: DEBUG nova.virt.hardware [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 923.478292] env[61852]: DEBUG nova.virt.hardware [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 923.478463] env[61852]: DEBUG nova.virt.hardware [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 
tempest-ServerDiskConfigTestJSON-1665070456-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 923.478629] env[61852]: DEBUG nova.virt.hardware [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 923.478804] env[61852]: DEBUG nova.virt.hardware [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 923.484290] env[61852]: DEBUG nova.scheduler.client.report [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 923.487472] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ed8f4741-d9e3-4aa5-a0ac-5d39e97d879d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.498750] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "refresh_cache-8bdb8059-3fb5-4f9c-bc73-b85bf8a23075" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 923.498942] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquired lock "refresh_cache-8bdb8059-3fb5-4f9c-bc73-b85bf8a23075" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.499048] env[61852]: DEBUG nova.network.neutron [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 923.507315] env[61852]: DEBUG oslo_vmware.api [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 923.507315] env[61852]: value = "task-1293145" [ 923.507315] env[61852]: _type = "Task" [ 923.507315] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.516233] env[61852]: DEBUG oslo_vmware.api [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293145, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.524108] env[61852]: DEBUG oslo_vmware.api [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Task: {'id': task-1293144, 'name': PowerOffVM_Task, 'duration_secs': 0.157306} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.524395] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 923.524575] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 923.524836] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fd5a07f9-b2e8-4198-afae-72e20f4a499f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.583325] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 923.583610] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 923.583746] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Deleting the datastore file [datastore1] e97448d7-0162-44bf-95d1-93bdcbcaec25 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 923.584023] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-66fa7c31-09a8-4d02-8649-23f61e18d246 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.590434] env[61852]: DEBUG oslo_vmware.api [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Waiting for 
the task: (returnval){ [ 923.590434] env[61852]: value = "task-1293147" [ 923.590434] env[61852]: _type = "Task" [ 923.590434] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.598490] env[61852]: DEBUG oslo_vmware.api [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Task: {'id': task-1293147, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.702513] env[61852]: DEBUG oslo_vmware.api [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52f17f24-8371-e356-12ba-82f4e988f0ef, 'name': SearchDatastore_Task, 'duration_secs': 0.016091} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.702813] env[61852]: DEBUG oslo_concurrency.lockutils [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 923.703092] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] cc5e0467-2960-43a1-bd7b-a528d5788028/cc5e0467-2960-43a1-bd7b-a528d5788028.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 923.703374] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cfae6286-483e-4109-a146-8366cc6c3b7f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.712059] env[61852]: DEBUG oslo_vmware.api [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 923.712059] env[61852]: value = "task-1293148" [ 923.712059] env[61852]: _type = "Task" [ 923.712059] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.720037] env[61852]: DEBUG oslo_vmware.api [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293148, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.999991] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.775s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.002428] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.294s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.004217] env[61852]: INFO nova.compute.claims [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 924.022464] env[61852]: DEBUG oslo_vmware.api [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293145, 'name': ReconfigVM_Task, 'duration_secs': 0.199118} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.023221] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Updating instance '51ecc9c3-a3fc-4bd7-8c90-003451700212' progress to 33 {{(pid=61852) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 924.028975] env[61852]: INFO nova.scheduler.client.report [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Deleted allocations for instance 6cb1968c-b951-4a83-a036-ba50b735133c [ 924.087153] env[61852]: DEBUG nova.network.neutron [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 924.103632] env[61852]: DEBUG oslo_vmware.api [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Task: {'id': task-1293147, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.218209} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.103995] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 924.104321] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 924.104541] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 924.104795] env[61852]: INFO nova.compute.manager [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Took 1.11 seconds to destroy the instance on the hypervisor. [ 924.105134] env[61852]: DEBUG oslo.service.loopingcall [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 924.105400] env[61852]: DEBUG nova.compute.manager [-] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 924.105520] env[61852]: DEBUG nova.network.neutron [-] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 924.223383] env[61852]: DEBUG oslo_vmware.api [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293148, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.248581] env[61852]: DEBUG nova.compute.manager [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 924.282819] env[61852]: DEBUG nova.virt.hardware [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 924.283090] env[61852]: DEBUG nova.virt.hardware [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 924.283257] env[61852]: DEBUG nova.virt.hardware [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 924.287152] env[61852]: DEBUG nova.virt.hardware [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 924.287421] env[61852]: DEBUG nova.virt.hardware [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 924.287549] env[61852]: DEBUG nova.virt.hardware [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 924.288805] env[61852]: DEBUG nova.virt.hardware [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 924.289045] env[61852]: DEBUG nova.virt.hardware [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 924.289244] 
env[61852]: DEBUG nova.virt.hardware [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 924.289455] env[61852]: DEBUG nova.virt.hardware [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 924.289602] env[61852]: DEBUG nova.virt.hardware [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 924.290500] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0d88683-ca55-4b88-be45-6ca1b6a158fc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.302897] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e553da1-ca8b-4a5a-9504-2f6766693c1e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.374663] env[61852]: DEBUG nova.network.neutron [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Updating instance_info_cache with network_info: [{"id": "3e0cb15e-f2d1-47c8-975c-dd685e0ad664", "address": "fa:16:3e:e3:82:31", "network": {"id": "240e5d63-b796-4cef-9d1f-5d8f8868dea4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1472329620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdac3605118e44a69d44ab56cafe2e21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e0cb15e-f2", "ovs_interfaceid": "3e0cb15e-f2d1-47c8-975c-dd685e0ad664", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.479208] env[61852]: DEBUG nova.compute.manager [req-43e6184a-8178-4ea1-95c5-ce1c436a0ed6 req-9037aa6a-7cd1-425c-a355-5d521c17cf0c service nova] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Received event network-vif-deleted-2d382abe-68f4-4b6f-a534-81e74a2503ef {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 924.479386] env[61852]: INFO nova.compute.manager 
[req-43e6184a-8178-4ea1-95c5-ce1c436a0ed6 req-9037aa6a-7cd1-425c-a355-5d521c17cf0c service nova] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Neutron deleted interface 2d382abe-68f4-4b6f-a534-81e74a2503ef; detaching it from the instance and deleting it from the info cache [ 924.479569] env[61852]: DEBUG nova.network.neutron [req-43e6184a-8178-4ea1-95c5-ce1c436a0ed6 req-9037aa6a-7cd1-425c-a355-5d521c17cf0c service nova] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.532218] env[61852]: DEBUG nova.virt.hardware [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 924.532479] env[61852]: DEBUG nova.virt.hardware [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 924.532765] env[61852]: DEBUG nova.virt.hardware [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 924.532967] env[61852]: DEBUG nova.virt.hardware [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 924.533165] env[61852]: DEBUG nova.virt.hardware [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 924.533286] env[61852]: DEBUG nova.virt.hardware [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 924.533506] env[61852]: DEBUG nova.virt.hardware [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 924.533675] env[61852]: DEBUG nova.virt.hardware [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 924.533844] env[61852]: DEBUG nova.virt.hardware [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 924.534016] env[61852]: DEBUG nova.virt.hardware [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 924.534203] env[61852]: DEBUG nova.virt.hardware [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 924.540117] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Reconfiguring VM instance instance-00000052 to detach disk 2000 {{(pid=61852) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 924.542024] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d81559f-62e0-49f4-b822-9ff773b223ed {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.557980] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c515d68c-23af-4752-8d84-584545ca8373 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "6cb1968c-b951-4a83-a036-ba50b735133c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.126s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.564885] env[61852]: DEBUG oslo_vmware.api [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 924.564885] env[61852]: value = "task-1293149" [ 924.564885] env[61852]: _type = "Task" [ 924.564885] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.575947] env[61852]: DEBUG oslo_vmware.api [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293149, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.606947] env[61852]: DEBUG nova.network.neutron [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Successfully updated port: 2c88ebc1-acb3-41ba-8d38-8647755ce777 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 924.722779] env[61852]: DEBUG oslo_vmware.api [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293148, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.596818} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.722940] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] cc5e0467-2960-43a1-bd7b-a528d5788028/cc5e0467-2960-43a1-bd7b-a528d5788028.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 924.723050] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 924.723337] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-27163be8-3a4c-465b-9240-9821073408fb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.729067] env[61852]: DEBUG oslo_vmware.api [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 924.729067] env[61852]: value = "task-1293150" [ 924.729067] env[61852]: _type = "Task" [ 924.729067] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.738492] env[61852]: DEBUG oslo_vmware.api [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293150, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.796766] env[61852]: DEBUG nova.compute.manager [req-a77d1d84-33b1-49f0-b1b6-38bd07727574 req-a049ea11-0f9f-41de-8a50-834707c7fc09 service nova] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Received event network-changed-3e0cb15e-f2d1-47c8-975c-dd685e0ad664 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 924.797025] env[61852]: DEBUG nova.compute.manager [req-a77d1d84-33b1-49f0-b1b6-38bd07727574 req-a049ea11-0f9f-41de-8a50-834707c7fc09 service nova] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Refreshing instance network info cache due to event network-changed-3e0cb15e-f2d1-47c8-975c-dd685e0ad664. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 924.797271] env[61852]: DEBUG oslo_concurrency.lockutils [req-a77d1d84-33b1-49f0-b1b6-38bd07727574 req-a049ea11-0f9f-41de-8a50-834707c7fc09 service nova] Acquiring lock "refresh_cache-8bdb8059-3fb5-4f9c-bc73-b85bf8a23075" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 924.877592] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Releasing lock "refresh_cache-8bdb8059-3fb5-4f9c-bc73-b85bf8a23075" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 924.877999] env[61852]: DEBUG nova.compute.manager [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Instance network_info: |[{"id": "3e0cb15e-f2d1-47c8-975c-dd685e0ad664", "address": "fa:16:3e:e3:82:31", "network": {"id": "240e5d63-b796-4cef-9d1f-5d8f8868dea4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1472329620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdac3605118e44a69d44ab56cafe2e21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e0cb15e-f2", "ovs_interfaceid": "3e0cb15e-f2d1-47c8-975c-dd685e0ad664", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 924.878354] env[61852]: DEBUG oslo_concurrency.lockutils [req-a77d1d84-33b1-49f0-b1b6-38bd07727574 req-a049ea11-0f9f-41de-8a50-834707c7fc09 service nova] Acquired lock "refresh_cache-8bdb8059-3fb5-4f9c-bc73-b85bf8a23075" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.879247] env[61852]: DEBUG nova.network.neutron [req-a77d1d84-33b1-49f0-b1b6-38bd07727574 req-a049ea11-0f9f-41de-8a50-834707c7fc09 service nova] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Refreshing network info cache for port 3e0cb15e-f2d1-47c8-975c-dd685e0ad664 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 924.879881] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:82:31', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cd5d325-3053-407e-a4ee-f627e82a23f9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3e0cb15e-f2d1-47c8-975c-dd685e0ad664', 
'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 924.889241] env[61852]: DEBUG oslo.service.loopingcall [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 924.891383] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 924.891550] env[61852]: DEBUG nova.network.neutron [-] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.893267] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fe969ca0-97b6-4086-843b-faadf2f2c874 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.916592] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 924.916592] env[61852]: value = "task-1293151" [ 924.916592] env[61852]: _type = "Task" [ 924.916592] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.924640] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293151, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.982266] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2f866fd1-73bc-4b99-b11d-a343f8b61606 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.994299] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80f7adc5-2fc3-4e01-868e-e36e85f821b9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.036422] env[61852]: DEBUG nova.compute.manager [req-43e6184a-8178-4ea1-95c5-ce1c436a0ed6 req-9037aa6a-7cd1-425c-a355-5d521c17cf0c service nova] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Detach interface failed, port_id=2d382abe-68f4-4b6f-a534-81e74a2503ef, reason: Instance e97448d7-0162-44bf-95d1-93bdcbcaec25 could not be found. {{(pid=61852) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 925.080237] env[61852]: DEBUG oslo_vmware.api [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293149, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.111887] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Acquiring lock "refresh_cache-b99bacc1-21e7-4bbd-8092-549246500421" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 925.111887] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Acquired lock "refresh_cache-b99bacc1-21e7-4bbd-8092-549246500421" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.111887] env[61852]: DEBUG nova.network.neutron [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 925.145083] env[61852]: DEBUG nova.network.neutron [req-a77d1d84-33b1-49f0-b1b6-38bd07727574 req-a049ea11-0f9f-41de-8a50-834707c7fc09 service nova] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Updated VIF entry in instance network info cache for port 3e0cb15e-f2d1-47c8-975c-dd685e0ad664. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 925.145510] env[61852]: DEBUG nova.network.neutron [req-a77d1d84-33b1-49f0-b1b6-38bd07727574 req-a049ea11-0f9f-41de-8a50-834707c7fc09 service nova] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Updating instance_info_cache with network_info: [{"id": "3e0cb15e-f2d1-47c8-975c-dd685e0ad664", "address": "fa:16:3e:e3:82:31", "network": {"id": "240e5d63-b796-4cef-9d1f-5d8f8868dea4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1472329620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdac3605118e44a69d44ab56cafe2e21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e0cb15e-f2", "ovs_interfaceid": "3e0cb15e-f2d1-47c8-975c-dd685e0ad664", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.170431] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c629a66b-e730-4ab5-8d8b-f9dd0b1c2739 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.179837] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-662d4a07-3628-47b6-a104-e74b99146661 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.213448] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee4fd8e5-0601-4eb0-8d6b-c8781b3d8c22 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.221740] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcb1b6a3-0c23-41c4-8251-70e7ab905d84 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.236356] env[61852]: DEBUG nova.compute.provider_tree [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 925.246579] env[61852]: DEBUG oslo_vmware.api [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293150, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.0631} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.246579] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 925.247163] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a94f6cc4-9c5e-4518-b24e-07ed84df489b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.263712] env[61852]: DEBUG oslo_concurrency.lockutils [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "254919cb-e3cd-4288-8696-95e632d78a38" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.263849] env[61852]: DEBUG oslo_concurrency.lockutils [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "254919cb-e3cd-4288-8696-95e632d78a38" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.264187] env[61852]: DEBUG oslo_concurrency.lockutils [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock 
"254919cb-e3cd-4288-8696-95e632d78a38-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.264555] env[61852]: DEBUG oslo_concurrency.lockutils [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "254919cb-e3cd-4288-8696-95e632d78a38-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.264610] env[61852]: DEBUG oslo_concurrency.lockutils [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "254919cb-e3cd-4288-8696-95e632d78a38-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.275458] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] cc5e0467-2960-43a1-bd7b-a528d5788028/cc5e0467-2960-43a1-bd7b-a528d5788028.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 925.276008] env[61852]: INFO nova.compute.manager [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Terminating instance [ 925.277698] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-728023e0-b3d5-48ba-bb13-a7762c02c107 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.292791] env[61852]: DEBUG nova.compute.manager [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Start destroying the instance on the hypervisor. 
[ 925.292997] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 925.294076] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf5bd40c-373e-4b8a-a51d-121e7f67c6ab {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.302309] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 925.303514] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-78d7af79-dfbb-4589-baac-eb12702d5340 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.304974] env[61852]: DEBUG oslo_vmware.api [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 925.304974] env[61852]: value = "task-1293152" [ 925.304974] env[61852]: _type = "Task" [ 925.304974] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.313944] env[61852]: DEBUG oslo_vmware.api [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293152, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.315766] env[61852]: DEBUG oslo_vmware.api [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 925.315766] env[61852]: value = "task-1293153" [ 925.315766] env[61852]: _type = "Task" [ 925.315766] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.324706] env[61852]: DEBUG oslo_vmware.api [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293153, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.394221] env[61852]: INFO nova.compute.manager [-] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Took 1.29 seconds to deallocate network for instance. [ 925.427038] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293151, 'name': CreateVM_Task} progress is 25%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.578750] env[61852]: DEBUG oslo_vmware.api [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293149, 'name': ReconfigVM_Task} progress is 99%.
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.648090] env[61852]: DEBUG oslo_concurrency.lockutils [req-a77d1d84-33b1-49f0-b1b6-38bd07727574 req-a049ea11-0f9f-41de-8a50-834707c7fc09 service nova] Releasing lock "refresh_cache-8bdb8059-3fb5-4f9c-bc73-b85bf8a23075" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 925.649287] env[61852]: DEBUG nova.network.neutron [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 925.758404] env[61852]: ERROR nova.scheduler.client.report [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [req-5105ef7e-64d4-406a-a52a-58f36f3fa8ad] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f818062c-7b17-4bd0-94af-192a674543c3. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5105ef7e-64d4-406a-a52a-58f36f3fa8ad"}]} [ 925.775466] env[61852]: DEBUG nova.scheduler.client.report [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Refreshing inventories for resource provider f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 925.783429] env[61852]: DEBUG nova.network.neutron [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Updating instance_info_cache with network_info: [{"id": "2c88ebc1-acb3-41ba-8d38-8647755ce777", "address": "fa:16:3e:38:78:4b", "network": {"id": "3625b90d-5a52-41f5-958d-5de80d933746", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-256880241-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "558cacb83ff34e9db608ade12bd52a5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c88ebc1-ac", "ovs_interfaceid": "2c88ebc1-acb3-41ba-8d38-8647755ce777", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.791656] env[61852]: DEBUG nova.scheduler.client.report [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Updating ProviderTree inventory for provider f818062c-7b17-4bd0-94af-192a674543c3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 925.791895] env[61852]: DEBUG nova.compute.provider_tree [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 925.804476] env[61852]: DEBUG nova.scheduler.client.report [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Refreshing aggregate associations for resource provider f818062c-7b17-4bd0-94af-192a674543c3, aggregates: None {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 925.815541] env[61852]: DEBUG oslo_vmware.api [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293152, 'name': ReconfigVM_Task, 'duration_secs': 0.368025} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.815878] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Reconfigured VM instance instance-00000055 to attach disk [datastore2] cc5e0467-2960-43a1-bd7b-a528d5788028/cc5e0467-2960-43a1-bd7b-a528d5788028.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 925.816649] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0880924e-1529-478d-a548-2ffe05499067 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.823606] env[61852]: DEBUG nova.scheduler.client.report [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Refreshing trait associations for resource provider f818062c-7b17-4bd0-94af-192a674543c3, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 925.829824] env[61852]: DEBUG oslo_vmware.api [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 925.829824] env[61852]: value = "task-1293154" [ 925.829824] env[61852]: _type = "Task" [ 925.829824] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.830064] env[61852]: DEBUG oslo_vmware.api [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293153, 'name': PowerOffVM_Task, 'duration_secs': 0.27839} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.830363] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 925.830535] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 925.833331] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-76daac3b-7563-462c-ae09-19acf0aaef3d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.839870] env[61852]: DEBUG oslo_vmware.api [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293154, 'name': Rename_Task} progress is 6%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.848284] env[61852]: DEBUG oslo_concurrency.lockutils [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "ba863c60-444a-4959-8f8f-87b4952d2872" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.848537] env[61852]: DEBUG oslo_concurrency.lockutils [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "ba863c60-444a-4959-8f8f-87b4952d2872" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.899152] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 925.899642] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 925.899642] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Deleting the datastore file [datastore1] 254919cb-e3cd-4288-8696-95e632d78a38 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 925.899905] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-859764c9-e413-4584-9882-8912fdd74b86 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.903070] env[61852]: DEBUG oslo_concurrency.lockutils [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.910805] env[61852]: DEBUG oslo_vmware.api [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 925.910805] env[61852]: value = "task-1293156" [ 925.910805] env[61852]: _type = "Task" [ 925.910805] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.919288] env[61852]: DEBUG oslo_vmware.api [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293156, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.930155] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293151, 'name': CreateVM_Task} progress is 25%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.984565] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92178d55-e817-4bf4-8738-aa81388cc1e1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.992175] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c6e44a8-c518-4da4-b6fd-5920f2d18f5c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.023440] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abd682ee-4bb2-4511-a09a-2ab25772b403 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.030503] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7927bbe1-e709-482b-a58c-9cd49406e914 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.043192] env[61852]: DEBUG nova.compute.provider_tree [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 926.076257] env[61852]: DEBUG oslo_vmware.api [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293149, 'name': ReconfigVM_Task, 'duration_secs': 1.215303} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
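The 409 from placement earlier in this run ("placement.concurrent_update", req-5105ef7e) and the refresh that ends in "Inventory has not changed" above are optimistic concurrency control: every inventory PUT carries the resource provider generation, and a conflict means another writer bumped it first, so the client re-reads and retries. A rough sketch of that retry loop against the Placement HTTP API (endpoint, token, and microversion are hypothetical placeholders, not taken from this deployment):

import requests

PLACEMENT = 'http://placement.example.org/placement'    # hypothetical
HEADERS = {'X-Auth-Token': 'ADMIN_TOKEN',               # hypothetical
           'OpenStack-API-Version': 'placement 1.26'}

def set_inventory(rp_uuid, inventories, attempts=3):
    for _ in range(attempts):
        # Re-read the provider to pick up its current generation.
        rp = requests.get(f'{PLACEMENT}/resource_providers/{rp_uuid}',
                          headers=HEADERS).json()
        resp = requests.put(
            f'{PLACEMENT}/resource_providers/{rp_uuid}/inventories',
            headers=HEADERS,
            json={'resource_provider_generation': rp['generation'],
                  'inventories': inventories})
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 placement.concurrent_update: a concurrent writer bumped the
        # generation; refresh and try again, as the report client does above.
    raise RuntimeError('placement generation conflict persisted')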
[ 926.076537] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Reconfigured VM instance instance-00000052 to detach disk 2000 {{(pid=61852) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 926.077284] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78d18cd1-51b0-4a80-a1f3-3b1960dae130 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.098131] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] 51ecc9c3-a3fc-4bd7-8c90-003451700212/51ecc9c3-a3fc-4bd7-8c90-003451700212.vmdk or device None with type thin {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 926.098401] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-90e6425e-5b7c-4d10-a00f-268d97866e1d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.110554] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquiring lock "8d8679db-eb9d-45c1-b053-70378f58e273" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.110781] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lock "8d8679db-eb9d-45c1-b053-70378f58e273" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.110951] env[61852]: INFO nova.compute.manager [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Shelving [ 926.117636] env[61852]: DEBUG oslo_vmware.api [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 926.117636] env[61852]: value = "task-1293157" [ 926.117636] env[61852]: _type = "Task" [ 926.117636] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.126838] env[61852]: DEBUG oslo_vmware.api [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293157, 'name': ReconfigVM_Task} progress is 5%.
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.286445] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Releasing lock "refresh_cache-b99bacc1-21e7-4bbd-8092-549246500421" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 926.286772] env[61852]: DEBUG nova.compute.manager [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Instance network_info: |[{"id": "2c88ebc1-acb3-41ba-8d38-8647755ce777", "address": "fa:16:3e:38:78:4b", "network": {"id": "3625b90d-5a52-41f5-958d-5de80d933746", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-256880241-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "558cacb83ff34e9db608ade12bd52a5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c88ebc1-ac", "ovs_interfaceid": "2c88ebc1-acb3-41ba-8d38-8647755ce777", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 926.287222] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:78:4b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc6e6fe1-c4f5-4389-a49f-0978060eebb4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2c88ebc1-acb3-41ba-8d38-8647755ce777', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 926.294555] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Creating folder: Project (558cacb83ff34e9db608ade12bd52a5d). Parent ref: group-v277280. 
{{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 926.294828] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6a6c31f9-81e9-4267-9bad-72f02fcf5601 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.305639] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Created folder: Project (558cacb83ff34e9db608ade12bd52a5d) in parent group-v277280. [ 926.305764] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Creating folder: Instances. Parent ref: group-v277389. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 926.306082] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3fcabcfc-06f3-4721-b737-73e889574a01 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.314052] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Created folder: Instances in parent group-v277389. [ 926.314291] env[61852]: DEBUG oslo.service.loopingcall [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 926.314481] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 926.314680] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b4e309ec-03d8-4b4e-9d0f-e4396f46b1bc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.334210] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 926.334210] env[61852]: value = "task-1293160" [ 926.334210] env[61852]: _type = "Task" [ 926.334210] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.342115] env[61852]: DEBUG oslo_vmware.api [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293154, 'name': Rename_Task, 'duration_secs': 0.165949} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.344984] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 926.345252] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293160, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.345451] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e3dc6d7d-f05f-41d4-9b39-32a0e9537194 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.352092] env[61852]: DEBUG nova.compute.manager [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 926.354632] env[61852]: DEBUG oslo_vmware.api [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 926.354632] env[61852]: value = "task-1293161" [ 926.354632] env[61852]: _type = "Task" [ 926.354632] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.362630] env[61852]: DEBUG oslo_vmware.api [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293161, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.423060] env[61852]: DEBUG oslo_vmware.api [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293156, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.215484} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.425815] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 926.426047] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 926.426278] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 926.426501] env[61852]: INFO nova.compute.manager [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Took 1.13 seconds to destroy the instance on the hypervisor. [ 926.426749] env[61852]: DEBUG oslo.service.loopingcall [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
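The oslo.service.loopingcall entry directly above ("Waiting for function ... _deallocate_network_with_retries to return.") is the library's standard wait-until-done idiom: wrap a poll function in a FixedIntervalLoopingCall and block on the event it returns; raising LoopingCallDone ends the loop and carries the result out. A self-contained sketch of that idiom (the fake poller is illustrative only, not Nova's retry logic):

from oslo_service import loopingcall

def wait_for(poll_once, interval=0.1):
    def _poll():
        result = poll_once()
        if result is not None:
            # Stops the loop; the .wait() below returns this value.
            raise loopingcall.LoopingCallDone(result)
    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    # start() schedules _poll every `interval` seconds and returns an
    # event; wait() blocks until LoopingCallDone is raised.
    return timer.start(interval=interval).wait()

state = {'tries': 0}
def fake_poll():
    state['tries'] += 1
    return 'deallocated' if state['tries'] >= 3 else None

print(wait_for(fake_poll))   # -> 'deallocated' after three polls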
[ 926.426966] env[61852]: DEBUG nova.compute.manager [-] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 926.427079] env[61852]: DEBUG nova.network.neutron [-] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 926.434256] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293151, 'name': CreateVM_Task, 'duration_secs': 1.10307} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.434554] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 926.435412] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.435549] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.435948] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 926.436266] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e28db76-fee1-447f-a56d-c3340f109d9e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.441100] env[61852]: DEBUG oslo_vmware.api [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 926.441100] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52b3f2df-b754-0b4a-7493-55a41c953c3c" [ 926.441100] env[61852]: _type = "Task" [ 926.441100] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.449901] env[61852]: DEBUG oslo_vmware.api [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52b3f2df-b754-0b4a-7493-55a41c953c3c, 'name': SearchDatastore_Task} progress is 0%.
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.522143] env[61852]: DEBUG nova.compute.manager [req-18f1d0db-77b9-41ce-ab05-ecb44167740f req-0dee16e0-6c11-4e8d-aa69-51f5d570d74e service nova] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Received event network-vif-plugged-2c88ebc1-acb3-41ba-8d38-8647755ce777 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 926.522371] env[61852]: DEBUG oslo_concurrency.lockutils [req-18f1d0db-77b9-41ce-ab05-ecb44167740f req-0dee16e0-6c11-4e8d-aa69-51f5d570d74e service nova] Acquiring lock "b99bacc1-21e7-4bbd-8092-549246500421-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.522585] env[61852]: DEBUG oslo_concurrency.lockutils [req-18f1d0db-77b9-41ce-ab05-ecb44167740f req-0dee16e0-6c11-4e8d-aa69-51f5d570d74e service nova] Lock "b99bacc1-21e7-4bbd-8092-549246500421-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.522761] env[61852]: DEBUG oslo_concurrency.lockutils [req-18f1d0db-77b9-41ce-ab05-ecb44167740f req-0dee16e0-6c11-4e8d-aa69-51f5d570d74e service nova] Lock "b99bacc1-21e7-4bbd-8092-549246500421-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.522928] env[61852]: DEBUG nova.compute.manager [req-18f1d0db-77b9-41ce-ab05-ecb44167740f req-0dee16e0-6c11-4e8d-aa69-51f5d570d74e service nova] [instance: b99bacc1-21e7-4bbd-8092-549246500421] No waiting events found dispatching network-vif-plugged-2c88ebc1-acb3-41ba-8d38-8647755ce777 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 926.523140] env[61852]: WARNING nova.compute.manager [req-18f1d0db-77b9-41ce-ab05-ecb44167740f req-0dee16e0-6c11-4e8d-aa69-51f5d570d74e service nova] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Received unexpected event network-vif-plugged-2c88ebc1-acb3-41ba-8d38-8647755ce777 for instance with vm_state building and task_state spawning. [ 926.523335] env[61852]: DEBUG nova.compute.manager [req-18f1d0db-77b9-41ce-ab05-ecb44167740f req-0dee16e0-6c11-4e8d-aa69-51f5d570d74e service nova] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Received event network-changed-2c88ebc1-acb3-41ba-8d38-8647755ce777 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 926.523494] env[61852]: DEBUG nova.compute.manager [req-18f1d0db-77b9-41ce-ab05-ecb44167740f req-0dee16e0-6c11-4e8d-aa69-51f5d570d74e service nova] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Refreshing instance network info cache due to event network-changed-2c88ebc1-acb3-41ba-8d38-8647755ce777. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 926.523679] env[61852]: DEBUG oslo_concurrency.lockutils [req-18f1d0db-77b9-41ce-ab05-ecb44167740f req-0dee16e0-6c11-4e8d-aa69-51f5d570d74e service nova] Acquiring lock "refresh_cache-b99bacc1-21e7-4bbd-8092-549246500421" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.523817] env[61852]: DEBUG oslo_concurrency.lockutils [req-18f1d0db-77b9-41ce-ab05-ecb44167740f req-0dee16e0-6c11-4e8d-aa69-51f5d570d74e service nova] Acquired lock "refresh_cache-b99bacc1-21e7-4bbd-8092-549246500421" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.523974] env[61852]: DEBUG nova.network.neutron [req-18f1d0db-77b9-41ce-ab05-ecb44167740f req-0dee16e0-6c11-4e8d-aa69-51f5d570d74e service nova] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Refreshing network info cache for port 2c88ebc1-acb3-41ba-8d38-8647755ce777 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 926.545926] env[61852]: DEBUG nova.scheduler.client.report [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 926.618305] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 926.618641] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0edeed9e-e0bf-43fa-bd16-1a213b3dec5c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.630264] env[61852]: DEBUG oslo_vmware.api [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293157, 'name': ReconfigVM_Task, 'duration_secs': 0.291153} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.631871] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Reconfigured VM instance instance-00000052 to attach disk [datastore1] 51ecc9c3-a3fc-4bd7-8c90-003451700212/51ecc9c3-a3fc-4bd7-8c90-003451700212.vmdk or device None with type thin {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 926.632237] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Updating instance '51ecc9c3-a3fc-4bd7-8c90-003451700212' progress to 50 {{(pid=61852) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 926.636235] env[61852]: DEBUG oslo_vmware.api [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){ [ 926.636235] env[61852]: value = "task-1293162" [ 926.636235] env[61852]: _type = "Task" [ 926.636235] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.646070] env[61852]: DEBUG oslo_vmware.api [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293162, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.844828] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293160, 'name': CreateVM_Task, 'duration_secs': 0.474664} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.846048] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 926.846395] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.868940] env[61852]: DEBUG oslo_vmware.api [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293161, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.877828] env[61852]: DEBUG oslo_concurrency.lockutils [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.951456] env[61852]: DEBUG oslo_vmware.api [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52b3f2df-b754-0b4a-7493-55a41c953c3c, 'name': SearchDatastore_Task, 'duration_secs': 0.013263} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.951559] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 926.951724] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 926.951968] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.952281] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.952511] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 926.952819] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.953157] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 
tempest-ServersTestFqdnHostnames-2043379552-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 926.953449] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bf9b1b60-221f-461a-b43f-0966809ccfef {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.955547] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f07ce189-91f7-44cc-b327-995b2a38b14c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.960181] env[61852]: DEBUG oslo_vmware.api [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Waiting for the task: (returnval){ [ 926.960181] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]521b799f-f5f9-57fe-17b2-12ddf6541634" [ 926.960181] env[61852]: _type = "Task" [ 926.960181] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.963776] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 926.963957] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 926.964904] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7818ca30-0446-44bd-91cd-68d678bc3cb4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.969731] env[61852]: DEBUG oslo_vmware.api [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]521b799f-f5f9-57fe-17b2-12ddf6541634, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.972439] env[61852]: DEBUG oslo_vmware.api [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 926.972439] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52589fa1-07bb-b033-ef1f-c92749a2cba6" [ 926.972439] env[61852]: _type = "Task" [ 926.972439] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.979384] env[61852]: DEBUG oslo_vmware.api [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52589fa1-07bb-b033-ef1f-c92749a2cba6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.051305] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.049s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 927.051867] env[61852]: DEBUG nova.compute.manager [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 927.054679] env[61852]: DEBUG oslo_concurrency.lockutils [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.152s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.054905] env[61852]: DEBUG nova.objects.instance [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Lazy-loading 'resources' on Instance uuid e97448d7-0162-44bf-95d1-93bdcbcaec25 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 927.145180] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-327d7622-9285-43ae-ba52-44dc71a22c1e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.153252] env[61852]: DEBUG oslo_vmware.api [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293162, 'name': PowerOffVM_Task, 'duration_secs': 0.245251} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.166619] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 927.170105] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d412f9a5-90f1-4329-bcfc-480d59f6675a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.172863] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b98e8a0-0226-48c1-b827-8b6561aa6284 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.203332] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d1d8af6-530d-4529-beef-d095da85ede4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.205977] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Updating instance '51ecc9c3-a3fc-4bd7-8c90-003451700212' progress to 67 {{(pid=61852) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 927.211808] env[61852]: DEBUG nova.network.neutron [-] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.246171] env[61852]: DEBUG nova.network.neutron [req-18f1d0db-77b9-41ce-ab05-ecb44167740f req-0dee16e0-6c11-4e8d-aa69-51f5d570d74e service nova] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Updated VIF entry in instance network info cache for port 2c88ebc1-acb3-41ba-8d38-8647755ce777. 
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 927.246830] env[61852]: DEBUG nova.network.neutron [req-18f1d0db-77b9-41ce-ab05-ecb44167740f req-0dee16e0-6c11-4e8d-aa69-51f5d570d74e service nova] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Updating instance_info_cache with network_info: [{"id": "2c88ebc1-acb3-41ba-8d38-8647755ce777", "address": "fa:16:3e:38:78:4b", "network": {"id": "3625b90d-5a52-41f5-958d-5de80d933746", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-256880241-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "558cacb83ff34e9db608ade12bd52a5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c88ebc1-ac", "ovs_interfaceid": "2c88ebc1-acb3-41ba-8d38-8647755ce777", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.367972] env[61852]: DEBUG oslo_vmware.api [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293161, 'name': PowerOnVM_Task, 'duration_secs': 0.817989} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.367972] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 927.367972] env[61852]: INFO nova.compute.manager [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Took 9.47 seconds to spawn the instance on the hypervisor. 
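
The Task records above follow oslo.vmware's polling pattern: wait_for_task registers interest in a vCenter task (api.py:397), _poll_task re-reads its progress (api.py:434, the "progress is N%" lines) until it reports success (api.py:444), at which point a duration_secs figure is attached. A minimal self-contained sketch of that poll loop, written against a hypothetical get_task_info() callable rather than the real VMwareAPISession internals, which this log does not show:

    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a vCenter-style task until it reaches a terminal state.

        Simplified stand-in for oslo_vmware.api.VMwareAPISession.wait_for_task;
        get_task_info is a hypothetical callable returning a dict such as
        {'state': 'running', 'progress': 89}.
        """
        start = time.monotonic()
        while True:
            info = get_task_info()
            if info['state'] == 'success':
                # corresponds to the "completed successfully" records above
                info['duration_secs'] = round(time.monotonic() - start, 6)
                return info
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            # corresponds to the repeated "progress is N%" DEBUG records
            time.sleep(poll_interval)
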
[ 927.368429] env[61852]: DEBUG nova.compute.manager [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 927.368916] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c9fa0d-1366-4208-b9a2-0c40ecffb8ce {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.472316] env[61852]: DEBUG oslo_vmware.api [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]521b799f-f5f9-57fe-17b2-12ddf6541634, 'name': SearchDatastore_Task, 'duration_secs': 0.015912} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.472869] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.473163] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 927.473359] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 927.480994] env[61852]: DEBUG oslo_vmware.api [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52589fa1-07bb-b033-ef1f-c92749a2cba6, 'name': SearchDatastore_Task, 'duration_secs': 0.032223} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.481697] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-987fed1e-cdd8-4c0f-a261-db982b093101 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.486834] env[61852]: DEBUG oslo_vmware.api [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 927.486834] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52c1b074-dd27-ac6b-e26f-136ae090f85c" [ 927.486834] env[61852]: _type = "Task" [ 927.486834] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.493801] env[61852]: DEBUG oslo_vmware.api [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52c1b074-dd27-ac6b-e26f-136ae090f85c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.560740] env[61852]: DEBUG nova.compute.utils [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 927.565164] env[61852]: DEBUG nova.compute.manager [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 927.565164] env[61852]: DEBUG nova.network.neutron [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 927.605672] env[61852]: DEBUG nova.policy [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0f04d129452d4eb79514c52a6972af0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e93a6965a6884292bc56b01f7d54a622', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 927.708117] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d686b8fc-3cb8-4bdb-b795-a843403ee3af {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.715397] env[61852]: INFO nova.compute.manager [-] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Took 1.29 seconds to deallocate network for instance. 
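
The "Acquiring lock … / Lock … acquired … waited Ns / … released … held Ns" triples throughout this section come from oslo.concurrency's lockutils wrapper, which times both how long a caller waited for a named lock and how long it held it. A minimal sketch against the real oslo.concurrency context manager; the lock name "compute_resources" is taken from the log, while the prints only approximate the library's own DEBUG messages:

    import time
    from oslo_concurrency import lockutils

    def update_usage():
        want = time.monotonic()
        # Same named lock the resource tracker serializes claims under.
        with lockutils.lock("compute_resources"):
            acquired = time.monotonic()
            print(f'lock acquired :: waited {acquired - want:.3f}s')
            ...  # claim, update, or release resources here
        print(f'lock released :: held {time.monotonic() - acquired:.3f}s')
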
[ 927.721398] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Creating Snapshot of the VM instance {{(pid=61852) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 927.721897] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-27e8e4fe-2af0-407d-9bec-16bf15795e8a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.724606] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5163383-6ab2-4fce-9c31-fee4e5aecb53 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.757735] env[61852]: DEBUG oslo_concurrency.lockutils [req-18f1d0db-77b9-41ce-ab05-ecb44167740f req-0dee16e0-6c11-4e8d-aa69-51f5d570d74e service nova] Releasing lock "refresh_cache-b99bacc1-21e7-4bbd-8092-549246500421" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.760111] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8c405a4-82a6-4fa7-b308-7bbb084c67d8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.764352] env[61852]: DEBUG oslo_vmware.api [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){ [ 927.764352] env[61852]: value = "task-1293163" [ 927.764352] env[61852]: _type = "Task" [ 927.764352] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.770502] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4011a0ed-5dc5-464b-881c-d10ca787e321 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.778218] env[61852]: DEBUG nova.network.neutron [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Port 61e94b93-d030-4c70-8ffc-ce81cbf29d01 binding to destination host cpu-1 is already ACTIVE {{(pid=61852) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 927.779663] env[61852]: DEBUG oslo_vmware.api [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293163, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.790754] env[61852]: DEBUG nova.compute.provider_tree [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 927.888686] env[61852]: INFO nova.compute.manager [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Took 15.70 seconds to build instance. [ 927.929926] env[61852]: DEBUG nova.network.neutron [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Successfully created port: fb4d01a4-4b0f-4591-aaf9-f8487c4cd460 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 927.997993] env[61852]: DEBUG oslo_vmware.api [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52c1b074-dd27-ac6b-e26f-136ae090f85c, 'name': SearchDatastore_Task, 'duration_secs': 0.03664} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.998338] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.998649] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075/8bdb8059-3fb5-4f9c-bc73-b85bf8a23075.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 927.998966] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.999185] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 927.999415] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with 
opID=oslo.vmware-c89c1da1-f39f-4cad-b51d-c6ad0b9346a9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.001865] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-66c192a7-b546-4a3c-ad9b-a98c7cee92f4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.010132] env[61852]: DEBUG oslo_vmware.api [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 928.010132] env[61852]: value = "task-1293164" [ 928.010132] env[61852]: _type = "Task" [ 928.010132] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.014249] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 928.014484] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 928.015397] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6e94623-52d1-4298-9e45-7e909cabdf67 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.022586] env[61852]: DEBUG oslo_vmware.api [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293164, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.026155] env[61852]: DEBUG oslo_vmware.api [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Waiting for the task: (returnval){ [ 928.026155] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52117056-f3c0-1fb9-01e5-05762fc76c5f" [ 928.026155] env[61852]: _type = "Task" [ 928.026155] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.033431] env[61852]: DEBUG oslo_vmware.api [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52117056-f3c0-1fb9-01e5-05762fc76c5f, 'name': SearchDatastore_Task, 'duration_secs': 0.009128} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.034246] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8fefb3a8-c0d9-4f3d-a628-01e3938dce33 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.040075] env[61852]: DEBUG oslo_vmware.api [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Waiting for the task: (returnval){ [ 928.040075] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529c8871-6e66-ac6b-ba3f-eb227ea4a360" [ 928.040075] env[61852]: _type = "Task" [ 928.040075] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.048341] env[61852]: DEBUG oslo_vmware.api [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529c8871-6e66-ac6b-ba3f-eb227ea4a360, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.065691] env[61852]: DEBUG nova.compute.manager [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 928.223317] env[61852]: DEBUG oslo_concurrency.lockutils [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.273630] env[61852]: DEBUG oslo_vmware.api [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293163, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.294207] env[61852]: DEBUG nova.scheduler.client.report [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 928.388242] env[61852]: DEBUG oslo_concurrency.lockutils [None req-403f88e9-6223-4d6f-a763-1f714d7559ac tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "cc5e0467-2960-43a1-bd7b-a528d5788028" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.209s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.520204] env[61852]: DEBUG oslo_vmware.api [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293164, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.547133] env[61852]: DEBUG nova.compute.manager [req-682f6aab-5c87-4ed3-b30a-4eec928d8750 req-b0698943-5f0b-414e-9778-692cd27aca16 service nova] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Received event network-vif-deleted-40eb747f-021a-4082-9f8d-70a6af6a415e {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 928.552067] env[61852]: DEBUG oslo_vmware.api [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529c8871-6e66-ac6b-ba3f-eb227ea4a360, 'name': SearchDatastore_Task, 'duration_secs': 0.008868} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.552374] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 928.552666] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] b99bacc1-21e7-4bbd-8092-549246500421/b99bacc1-21e7-4bbd-8092-549246500421.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 928.552952] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eeb49c22-b772-4f4b-9e25-fe1a5bbbfa64 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.560023] env[61852]: DEBUG oslo_vmware.api [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Waiting for the task: (returnval){ [ 928.560023] env[61852]: value = "task-1293165" [ 928.560023] env[61852]: _type = "Task" [ 928.560023] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.568396] env[61852]: DEBUG oslo_vmware.api [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Task: {'id': task-1293165, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.764404] env[61852]: DEBUG oslo_concurrency.lockutils [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "561d33d0-cad5-48ae-bd32-5de2220c5283" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.764647] env[61852]: DEBUG oslo_concurrency.lockutils [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "561d33d0-cad5-48ae-bd32-5de2220c5283" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.777923] env[61852]: DEBUG oslo_vmware.api [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293163, 'name': CreateSnapshot_Task, 'duration_secs': 0.593562} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.778780] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Created Snapshot of the VM instance {{(pid=61852) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 928.779514] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-917f5eb4-86de-4693-a32b-50f4bc81f295 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.796397] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "51ecc9c3-a3fc-4bd7-8c90-003451700212-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.796625] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "51ecc9c3-a3fc-4bd7-8c90-003451700212-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.796799] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "51ecc9c3-a3fc-4bd7-8c90-003451700212-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.801855] env[61852]: DEBUG oslo_concurrency.lockutils [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.747s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.807606] env[61852]: DEBUG oslo_concurrency.lockutils [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.930s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.809207] env[61852]: INFO nova.compute.claims [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 928.823767] env[61852]: INFO nova.scheduler.client.report [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Deleted allocations 
for instance e97448d7-0162-44bf-95d1-93bdcbcaec25 [ 929.020176] env[61852]: DEBUG oslo_vmware.api [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293164, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.555044} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.020463] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075/8bdb8059-3fb5-4f9c-bc73-b85bf8a23075.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 929.020689] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 929.020943] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-55df2393-8147-4097-a91a-23883a61e22d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.027613] env[61852]: DEBUG oslo_vmware.api [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 929.027613] env[61852]: value = "task-1293166" [ 929.027613] env[61852]: _type = "Task" [ 929.027613] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.035913] env[61852]: DEBUG oslo_vmware.api [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293166, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.069816] env[61852]: DEBUG oslo_vmware.api [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Task: {'id': task-1293165, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.074532] env[61852]: DEBUG nova.compute.manager [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 929.099806] env[61852]: DEBUG nova.virt.hardware [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 929.100062] env[61852]: DEBUG nova.virt.hardware [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 929.100243] env[61852]: DEBUG nova.virt.hardware [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 929.100437] env[61852]: DEBUG nova.virt.hardware [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 929.100585] env[61852]: DEBUG nova.virt.hardware [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 929.100753] env[61852]: DEBUG nova.virt.hardware [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 929.100965] env[61852]: DEBUG nova.virt.hardware [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 929.101144] env[61852]: DEBUG nova.virt.hardware [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 929.101356] env[61852]: DEBUG 
nova.virt.hardware [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 929.101528] env[61852]: DEBUG nova.virt.hardware [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 929.101704] env[61852]: DEBUG nova.virt.hardware [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 929.102549] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805723f3-1f89-4776-9cd6-f53934b58df4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.110098] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-341ec31b-6e23-4ad3-94b9-edbd1dd89a67 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.268512] env[61852]: DEBUG nova.compute.manager [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 929.308995] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Creating linked-clone VM from snapshot {{(pid=61852) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 929.310730] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-bad22e5d-99dc-43c3-b1ed-a86192723195 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.321959] env[61852]: DEBUG oslo_vmware.api [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){ [ 929.321959] env[61852]: value = "task-1293167" [ 929.321959] env[61852]: _type = "Task" [ 929.321959] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.334577] env[61852]: DEBUG oslo_vmware.api [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293167, 'name': CloneVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.335020] env[61852]: DEBUG oslo_concurrency.lockutils [None req-7808bfdf-529f-46bc-a80a-d91f9505fa5a tempest-InstanceActionsNegativeTestJSON-842929487 tempest-InstanceActionsNegativeTestJSON-842929487-project-member] Lock "e97448d7-0162-44bf-95d1-93bdcbcaec25" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.340s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.537441] env[61852]: DEBUG oslo_vmware.api [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293166, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06346} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.537719] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 929.538560] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17646eb9-3191-41b7-8a9d-4f4eb17e2c73 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.562337] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075/8bdb8059-3fb5-4f9c-bc73-b85bf8a23075.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 929.562720] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4c1e9c7d-c27a-4791-aae0-cdcbea43398e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.585925] env[61852]: DEBUG oslo_vmware.api [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Task: {'id': task-1293165, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.587204] env[61852]: DEBUG oslo_vmware.api [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 929.587204] env[61852]: value = "task-1293168" [ 929.587204] env[61852]: _type = "Task" [ 929.587204] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.595094] env[61852]: DEBUG oslo_vmware.api [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293168, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.727565] env[61852]: DEBUG nova.network.neutron [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Successfully updated port: fb4d01a4-4b0f-4591-aaf9-f8487c4cd460 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 929.789625] env[61852]: DEBUG oslo_concurrency.lockutils [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.834744] env[61852]: DEBUG oslo_vmware.api [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293167, 'name': CloneVM_Task} progress is 94%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.885047] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "refresh_cache-51ecc9c3-a3fc-4bd7-8c90-003451700212" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.886046] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired lock "refresh_cache-51ecc9c3-a3fc-4bd7-8c90-003451700212" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.886046] env[61852]: DEBUG nova.network.neutron [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 930.001742] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e508816-6f40-4fd6-97a9-f84fbadbd148 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.012186] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b2cba5d-3d83-462a-b8b5-352965e8a7b3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.048229] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aadf9608-bcce-4583-8944-078046eb6e4c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.058057] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47336df5-f471-4a32-8701-625c8dce4f25 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.074465] env[61852]: DEBUG nova.compute.provider_tree [None 
req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 930.082126] env[61852]: DEBUG oslo_vmware.api [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Task: {'id': task-1293165, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.097686] env[61852]: DEBUG oslo_vmware.api [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293168, 'name': ReconfigVM_Task, 'duration_secs': 0.39028} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.098019] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Reconfigured VM instance instance-00000056 to attach disk [datastore2] 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075/8bdb8059-3fb5-4f9c-bc73-b85bf8a23075.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 930.098698] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-32ac4cfa-e873-46bb-803f-9074f1bd7ef1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.104701] env[61852]: DEBUG oslo_vmware.api [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 930.104701] env[61852]: value = "task-1293169" [ 930.104701] env[61852]: _type = "Task" [ 930.104701] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.117822] env[61852]: DEBUG oslo_vmware.api [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293169, 'name': Rename_Task} progress is 6%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.230915] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "refresh_cache-d58958f2-7b6f-4480-9710-aa9e67ebd37c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.231219] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquired lock "refresh_cache-d58958f2-7b6f-4480-9710-aa9e67ebd37c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.231338] env[61852]: DEBUG nova.network.neutron [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 930.333794] env[61852]: DEBUG oslo_vmware.api [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293167, 'name': CloneVM_Task} progress is 94%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.575356] env[61852]: DEBUG oslo_vmware.api [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Task: {'id': task-1293165, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.534804} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.575677] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] b99bacc1-21e7-4bbd-8092-549246500421/b99bacc1-21e7-4bbd-8092-549246500421.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 930.575918] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 930.576229] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d63fe2e4-59a2-41e2-887b-2edeedf3af17 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.580555] env[61852]: DEBUG nova.compute.manager [req-c2c2b4f7-9f4f-48a4-afde-b8f1e8d4580c req-8e5c22fa-8b65-417d-99a6-c4c811183b24 service nova] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Received event network-vif-plugged-fb4d01a4-4b0f-4591-aaf9-f8487c4cd460 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 930.580772] env[61852]: DEBUG oslo_concurrency.lockutils [req-c2c2b4f7-9f4f-48a4-afde-b8f1e8d4580c req-8e5c22fa-8b65-417d-99a6-c4c811183b24 service nova] Acquiring lock "d58958f2-7b6f-4480-9710-aa9e67ebd37c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 930.581018] env[61852]: DEBUG oslo_concurrency.lockutils [req-c2c2b4f7-9f4f-48a4-afde-b8f1e8d4580c req-8e5c22fa-8b65-417d-99a6-c4c811183b24 service nova] Lock "d58958f2-7b6f-4480-9710-aa9e67ebd37c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.581202] env[61852]: DEBUG oslo_concurrency.lockutils [req-c2c2b4f7-9f4f-48a4-afde-b8f1e8d4580c req-8e5c22fa-8b65-417d-99a6-c4c811183b24 service nova] Lock "d58958f2-7b6f-4480-9710-aa9e67ebd37c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 930.581378] env[61852]: DEBUG nova.compute.manager [req-c2c2b4f7-9f4f-48a4-afde-b8f1e8d4580c req-8e5c22fa-8b65-417d-99a6-c4c811183b24 service nova] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] No waiting events found dispatching network-vif-plugged-fb4d01a4-4b0f-4591-aaf9-f8487c4cd460 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 930.581544] env[61852]: WARNING nova.compute.manager [req-c2c2b4f7-9f4f-48a4-afde-b8f1e8d4580c req-8e5c22fa-8b65-417d-99a6-c4c811183b24 service nova] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Received unexpected event network-vif-plugged-fb4d01a4-4b0f-4591-aaf9-f8487c4cd460 for instance with vm_state building 
and task_state spawning. [ 930.581704] env[61852]: DEBUG nova.compute.manager [req-c2c2b4f7-9f4f-48a4-afde-b8f1e8d4580c req-8e5c22fa-8b65-417d-99a6-c4c811183b24 service nova] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Received event network-changed-fb4d01a4-4b0f-4591-aaf9-f8487c4cd460 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 930.581938] env[61852]: DEBUG nova.compute.manager [req-c2c2b4f7-9f4f-48a4-afde-b8f1e8d4580c req-8e5c22fa-8b65-417d-99a6-c4c811183b24 service nova] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Refreshing instance network info cache due to event network-changed-fb4d01a4-4b0f-4591-aaf9-f8487c4cd460. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 930.582042] env[61852]: DEBUG oslo_concurrency.lockutils [req-c2c2b4f7-9f4f-48a4-afde-b8f1e8d4580c req-8e5c22fa-8b65-417d-99a6-c4c811183b24 service nova] Acquiring lock "refresh_cache-d58958f2-7b6f-4480-9710-aa9e67ebd37c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.584153] env[61852]: DEBUG nova.scheduler.client.report [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 930.591028] env[61852]: DEBUG oslo_vmware.api [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Waiting for the task: (returnval){ [ 930.591028] env[61852]: value = "task-1293170" [ 930.591028] env[61852]: _type = "Task" [ 930.591028] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.597431] env[61852]: DEBUG oslo_vmware.api [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Task: {'id': task-1293170, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.607771] env[61852]: DEBUG nova.network.neutron [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Updating instance_info_cache with network_info: [{"id": "61e94b93-d030-4c70-8ffc-ce81cbf29d01", "address": "fa:16:3e:23:a2:0c", "network": {"id": "37c975fc-d484-4e07-82b4-dc10db3dab61", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2132613748-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14a017ea2b084ae0ad2994dda7809c7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61e94b93-d0", "ovs_interfaceid": "61e94b93-d030-4c70-8ffc-ce81cbf29d01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.620152] env[61852]: DEBUG oslo_vmware.api [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293169, 'name': Rename_Task, 'duration_secs': 0.132789} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.620477] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 930.620717] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea731969-d254-4068-95db-ea651ef13c7d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.628022] env[61852]: DEBUG oslo_vmware.api [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 930.628022] env[61852]: value = "task-1293171" [ 930.628022] env[61852]: _type = "Task" [ 930.628022] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.634726] env[61852]: DEBUG oslo_vmware.api [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293171, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.764935] env[61852]: DEBUG nova.network.neutron [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 930.839094] env[61852]: DEBUG oslo_vmware.api [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293167, 'name': CloneVM_Task, 'duration_secs': 1.494908} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.839535] env[61852]: INFO nova.virt.vmwareapi.vmops [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Created linked-clone VM from snapshot [ 930.840380] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ef119a3-c1d3-4f44-984e-8ac2402c9433 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.851260] env[61852]: DEBUG nova.virt.vmwareapi.images [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Uploading image f5e5a587-44f8-4b6c-b924-cca27583fcf9 {{(pid=61852) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 930.880591] env[61852]: DEBUG oslo_vmware.rw_handles [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 930.880591] env[61852]: value = "vm-277393" [ 930.880591] env[61852]: _type = "VirtualMachine" [ 930.880591] env[61852]: }. {{(pid=61852) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 930.880904] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-181a602d-3f60-4e21-a6e3-de0097086116 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.891498] env[61852]: DEBUG oslo_vmware.rw_handles [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lease: (returnval){ [ 930.891498] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]525eb0f3-0497-97f8-8362-6ab0c2a80b3b" [ 930.891498] env[61852]: _type = "HttpNfcLease" [ 930.891498] env[61852]: } obtained for exporting VM: (result){ [ 930.891498] env[61852]: value = "vm-277393" [ 930.891498] env[61852]: _type = "VirtualMachine" [ 930.891498] env[61852]: }. 
{{(pid=61852) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 930.891878] env[61852]: DEBUG oslo_vmware.api [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the lease: (returnval){ [ 930.891878] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]525eb0f3-0497-97f8-8362-6ab0c2a80b3b" [ 930.891878] env[61852]: _type = "HttpNfcLease" [ 930.891878] env[61852]: } to be ready. {{(pid=61852) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 930.899164] env[61852]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 930.899164] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]525eb0f3-0497-97f8-8362-6ab0c2a80b3b" [ 930.899164] env[61852]: _type = "HttpNfcLease" [ 930.899164] env[61852]: } is initializing. {{(pid=61852) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 930.922722] env[61852]: DEBUG nova.network.neutron [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Updating instance_info_cache with network_info: [{"id": "fb4d01a4-4b0f-4591-aaf9-f8487c4cd460", "address": "fa:16:3e:fe:46:c7", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb4d01a4-4b", "ovs_interfaceid": "fb4d01a4-4b0f-4591-aaf9-f8487c4cd460", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.089054] env[61852]: DEBUG oslo_concurrency.lockutils [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.281s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.089824] env[61852]: DEBUG nova.compute.manager [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 931.093718] env[61852]: DEBUG oslo_concurrency.lockutils [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.871s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 931.094053] env[61852]: DEBUG nova.objects.instance [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lazy-loading 'resources' on Instance uuid 254919cb-e3cd-4288-8696-95e632d78a38 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 931.109163] env[61852]: DEBUG oslo_vmware.api [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Task: {'id': task-1293170, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.123551} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.111093] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 931.111953] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3b18cb9-7467-4127-bd3d-5aff3d0a8119 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.116319] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Releasing lock "refresh_cache-51ecc9c3-a3fc-4bd7-8c90-003451700212" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.152756] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Reconfiguring VM instance instance-00000057 to attach disk [datastore2] b99bacc1-21e7-4bbd-8092-549246500421/b99bacc1-21e7-4bbd-8092-549246500421.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 931.156595] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-660bb924-f409-4170-b9ab-d9c268ff3085 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.178790] env[61852]: DEBUG oslo_vmware.api [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293171, 'name': PowerOnVM_Task, 'duration_secs': 0.481114} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.180348] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 931.181249] env[61852]: INFO nova.compute.manager [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Took 9.19 seconds to spawn the instance on the hypervisor. [ 931.181249] env[61852]: DEBUG nova.compute.manager [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 931.181249] env[61852]: DEBUG oslo_vmware.api [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Waiting for the task: (returnval){ [ 931.181249] env[61852]: value = "task-1293173" [ 931.181249] env[61852]: _type = "Task" [ 931.181249] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.181837] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cad77fd0-e799-4e7d-b141-39432eb1446b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.195422] env[61852]: DEBUG oslo_vmware.api [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Task: {'id': task-1293173, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.399271] env[61852]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 931.399271] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]525eb0f3-0497-97f8-8362-6ab0c2a80b3b" [ 931.399271] env[61852]: _type = "HttpNfcLease" [ 931.399271] env[61852]: } is ready. {{(pid=61852) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 931.399584] env[61852]: DEBUG oslo_vmware.rw_handles [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 931.399584] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]525eb0f3-0497-97f8-8362-6ab0c2a80b3b" [ 931.399584] env[61852]: _type = "HttpNfcLease" [ 931.399584] env[61852]: }. 
{{(pid=61852) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 931.400337] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3902213-ced4-4256-bc39-f903198f7884 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.408275] env[61852]: DEBUG oslo_vmware.rw_handles [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5267b81d-cd58-a4ba-da6e-f5ee6cf683cd/disk-0.vmdk from lease info. {{(pid=61852) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 931.408461] env[61852]: DEBUG oslo_vmware.rw_handles [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5267b81d-cd58-a4ba-da6e-f5ee6cf683cd/disk-0.vmdk for reading. {{(pid=61852) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 931.469313] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Releasing lock "refresh_cache-d58958f2-7b6f-4480-9710-aa9e67ebd37c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.469313] env[61852]: DEBUG nova.compute.manager [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Instance network_info: |[{"id": "fb4d01a4-4b0f-4591-aaf9-f8487c4cd460", "address": "fa:16:3e:fe:46:c7", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb4d01a4-4b", "ovs_interfaceid": "fb4d01a4-4b0f-4591-aaf9-f8487c4cd460", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 931.469313] env[61852]: DEBUG oslo_concurrency.lockutils [req-c2c2b4f7-9f4f-48a4-afde-b8f1e8d4580c req-8e5c22fa-8b65-417d-99a6-c4c811183b24 service nova] Acquired lock "refresh_cache-d58958f2-7b6f-4480-9710-aa9e67ebd37c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.469795] 
env[61852]: DEBUG nova.network.neutron [req-c2c2b4f7-9f4f-48a4-afde-b8f1e8d4580c req-8e5c22fa-8b65-417d-99a6-c4c811183b24 service nova] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Refreshing network info cache for port fb4d01a4-4b0f-4591-aaf9-f8487c4cd460 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 931.471385] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:46:c7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fb4d01a4-4b0f-4591-aaf9-f8487c4cd460', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 931.481677] env[61852]: DEBUG oslo.service.loopingcall [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 931.484485] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 931.485177] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-94bb3e55-30d1-43d1-92cb-ad687dcd6b92 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.508940] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 931.508940] env[61852]: value = "task-1293174" [ 931.508940] env[61852]: _type = "Task" [ 931.508940] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.518178] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293174, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.549575] env[61852]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-536cfaaa-7ab5-46c4-88e5-3a34ac6976e9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.606306] env[61852]: DEBUG nova.compute.utils [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 931.606306] env[61852]: DEBUG nova.compute.manager [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 931.606306] env[61852]: DEBUG nova.network.neutron [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 931.641894] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40445097-9927-468f-b8bf-e154a9fa6438 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.664276] env[61852]: DEBUG nova.policy [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5db98c1126cc41b5930b2e5fa823c330', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '783bc6968c91488293479f10b8dc92c1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 931.668529] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fdb39f1-f2fa-4995-9f40-d8ec6402b171 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.675951] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Updating instance '51ecc9c3-a3fc-4bd7-8c90-003451700212' progress to 83 {{(pid=61852) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 931.693840] env[61852]: DEBUG oslo_vmware.api [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Task: {'id': task-1293173, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.709974] env[61852]: INFO nova.compute.manager [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Took 15.18 seconds to build instance. 
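The ReconfigVM_Task, Rename_Task and PowerOnVM_Task cycles above all follow the same oslo.vmware pattern: the driver invokes a vCenter task method through the API session, then blocks in wait_for_task() while _poll_task emits the "progress is N%" records until the task completes (the "completed successfully" records carrying duration_secs). A minimal sketch of that pattern, assuming placeholder connection details and a VM managed-object reference resolved elsewhere:

    from oslo_vmware import api

    def reconfig_and_wait(vm_ref, config_spec):
        # Placeholder endpoint and credentials; task_poll_interval controls how
        # often the "progress is N%" polls seen above are emitted.
        session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                       api_retry_count=10,
                                       task_poll_interval=0.5)
        # Start the task on vCenter (same call family as the "Invoking
        # VirtualMachine.ReconfigVM_Task" records in this log)...
        task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                                  vm_ref, spec=config_spec)
        # ...then block until vCenter reports the task finished; this raises if
        # the task errors, otherwise returns the completed task info.
        return session.wait_for_task(task)

wait_for_task() either returns the task info on success or raises, which is why each task in this log ends in exactly one of a "completed successfully" record or an exception trace.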
[ 931.809460] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8d04205-b1d6-44cf-bb08-01ec59d5b7af {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.817694] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8f31a45-5d19-4b6f-8fc5-ffe2b823ead8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.848497] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0335e23-b82b-4392-be6c-9c33b5f452f6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.856153] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f9d0e66-1ec8-4b20-84e8-cb32441e5843 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.871390] env[61852]: DEBUG nova.compute.provider_tree [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 931.969549] env[61852]: DEBUG nova.network.neutron [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Successfully created port: 5b69df93-12bd-4374-9aa2-76e5c7e7ddb9 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 932.019765] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293174, 'name': CreateVM_Task} progress is 99%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.108726] env[61852]: DEBUG nova.compute.manager [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 932.182622] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 932.183347] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-48ea7f88-142f-4f91-9da3-8075a6e36a88 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.196328] env[61852]: DEBUG oslo_vmware.api [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Task: {'id': task-1293173, 'name': ReconfigVM_Task, 'duration_secs': 0.532306} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.197769] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Reconfigured VM instance instance-00000057 to attach disk [datastore2] b99bacc1-21e7-4bbd-8092-549246500421/b99bacc1-21e7-4bbd-8092-549246500421.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 932.198529] env[61852]: DEBUG oslo_vmware.api [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 932.198529] env[61852]: value = "task-1293175" [ 932.198529] env[61852]: _type = "Task" [ 932.198529] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.198814] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c459df55-db04-4016-92db-63fa881d86cb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.212606] env[61852]: DEBUG oslo_vmware.api [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293175, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.212606] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2321ac75-ebb3-4f91-99f5-325a8f59fa35 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "8bdb8059-3fb5-4f9c-bc73-b85bf8a23075" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.688s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.212606] env[61852]: DEBUG oslo_vmware.api [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Waiting for the task: (returnval){ [ 932.212606] env[61852]: value = "task-1293176" [ 932.212606] env[61852]: _type = "Task" [ 932.212606] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.222955] env[61852]: DEBUG oslo_vmware.api [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Task: {'id': task-1293176, 'name': Rename_Task} progress is 10%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.348581] env[61852]: DEBUG nova.network.neutron [req-c2c2b4f7-9f4f-48a4-afde-b8f1e8d4580c req-8e5c22fa-8b65-417d-99a6-c4c811183b24 service nova] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Updated VIF entry in instance network info cache for port fb4d01a4-4b0f-4591-aaf9-f8487c4cd460. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 932.349419] env[61852]: DEBUG nova.network.neutron [req-c2c2b4f7-9f4f-48a4-afde-b8f1e8d4580c req-8e5c22fa-8b65-417d-99a6-c4c811183b24 service nova] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Updating instance_info_cache with network_info: [{"id": "fb4d01a4-4b0f-4591-aaf9-f8487c4cd460", "address": "fa:16:3e:fe:46:c7", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb4d01a4-4b", "ovs_interfaceid": "fb4d01a4-4b0f-4591-aaf9-f8487c4cd460", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.399232] env[61852]: ERROR nova.scheduler.client.report [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [req-562395d5-e933-43a3-b735-69a71caaaab2] 
Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f818062c-7b17-4bd0-94af-192a674543c3. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-562395d5-e933-43a3-b735-69a71caaaab2"}]} [ 932.421295] env[61852]: DEBUG nova.scheduler.client.report [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Refreshing inventories for resource provider f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 932.438834] env[61852]: DEBUG nova.scheduler.client.report [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Updating ProviderTree inventory for provider f818062c-7b17-4bd0-94af-192a674543c3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 932.439108] env[61852]: DEBUG nova.compute.provider_tree [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 932.453792] env[61852]: DEBUG nova.scheduler.client.report [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Refreshing aggregate associations for resource provider f818062c-7b17-4bd0-94af-192a674543c3, aggregates: None {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 932.474032] env[61852]: DEBUG nova.scheduler.client.report [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Refreshing trait associations for resource provider f818062c-7b17-4bd0-94af-192a674543c3, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 932.523237] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293174, 'name': 
CreateVM_Task, 'duration_secs': 0.528049} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.525494] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 932.526653] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.526853] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.527283] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 932.527622] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f98cad1f-a5b7-4933-8ee8-202b3dec9c50 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.533688] env[61852]: DEBUG oslo_vmware.api [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 932.533688] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]520624a1-ca90-51d2-342e-f141966784eb" [ 932.533688] env[61852]: _type = "Task" [ 932.533688] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.544962] env[61852]: DEBUG oslo_vmware.api [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]520624a1-ca90-51d2-342e-f141966784eb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.657894] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d5f2200-1033-435c-a973-cee3217cafce {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.666395] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b76d38e-0b32-4f3b-b56e-b1f36d2b7d31 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.700382] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cc77a26-0df5-4d7c-ba28-42c1e325eb85 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.714959] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3df67194-2341-4024-b33f-40f3c32dcef0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.719302] env[61852]: DEBUG oslo_vmware.api [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293175, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.728362] env[61852]: DEBUG oslo_vmware.api [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Task: {'id': task-1293176, 'name': Rename_Task, 'duration_secs': 0.206625} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.737238] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 932.737882] env[61852]: DEBUG nova.compute.provider_tree [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 932.739297] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1d146802-4614-4362-a82c-46c7ff03d489 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.746528] env[61852]: DEBUG oslo_vmware.api [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Waiting for the task: (returnval){ [ 932.746528] env[61852]: value = "task-1293177" [ 932.746528] env[61852]: _type = "Task" [ 932.746528] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.756616] env[61852]: DEBUG oslo_vmware.api [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Task: {'id': task-1293177, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.855151] env[61852]: DEBUG oslo_concurrency.lockutils [req-c2c2b4f7-9f4f-48a4-afde-b8f1e8d4580c req-8e5c22fa-8b65-417d-99a6-c4c811183b24 service nova] Releasing lock "refresh_cache-d58958f2-7b6f-4480-9710-aa9e67ebd37c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.026549] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64f6c80d-be7f-4d2d-b725-b24a2d076f2e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.035032] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-62280d4e-53e9-4175-aa5e-c7a2258b68d6 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Suspending the VM {{(pid=61852) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 933.037413] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-7e47cd1a-89c6-4b16-a280-fc1549a4ec07 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.045591] env[61852]: DEBUG oslo_vmware.api [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]520624a1-ca90-51d2-342e-f141966784eb, 'name': SearchDatastore_Task, 'duration_secs': 0.0127} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.047302] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.047749] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 933.048262] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 933.048750] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.049152] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 933.049897] env[61852]: DEBUG oslo_vmware.api [None req-62280d4e-53e9-4175-aa5e-c7a2258b68d6 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 933.049897] env[61852]: value = "task-1293178" [ 933.049897] env[61852]: _type = "Task" [ 933.049897] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.050942] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-46f39ce0-e234-4ff3-99fb-782a94128826 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.061621] env[61852]: DEBUG oslo_vmware.api [None req-62280d4e-53e9-4175-aa5e-c7a2258b68d6 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293178, 'name': SuspendVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.071192] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 933.071435] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 933.072307] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e688cbe-a8cd-483f-beb5-9ff3d4f7f551 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.078720] env[61852]: DEBUG oslo_vmware.api [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 933.078720] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5294d00b-97c3-12eb-aac2-f320ba2538b0" [ 933.078720] env[61852]: _type = "Task" [ 933.078720] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.089226] env[61852]: DEBUG oslo_vmware.api [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5294d00b-97c3-12eb-aac2-f320ba2538b0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.120209] env[61852]: DEBUG nova.compute.manager [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 933.176590] env[61852]: DEBUG nova.virt.hardware [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 933.176993] env[61852]: DEBUG nova.virt.hardware [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 933.177191] env[61852]: DEBUG nova.virt.hardware [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 933.177501] env[61852]: DEBUG nova.virt.hardware [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 933.177766] env[61852]: DEBUG nova.virt.hardware [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 933.179882] env[61852]: DEBUG nova.virt.hardware [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 933.180237] env[61852]: DEBUG nova.virt.hardware [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 933.180519] env[61852]: DEBUG nova.virt.hardware [None req-67fedbb4-c74c-48ef-9805-e29678c21780 
tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 933.181166] env[61852]: DEBUG nova.virt.hardware [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 933.181436] env[61852]: DEBUG nova.virt.hardware [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 933.181731] env[61852]: DEBUG nova.virt.hardware [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 933.182879] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd08bfea-3197-4c04-842e-842e0ec5f723 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.193517] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fb632be-ebf1-4f90-84d7-ab538378e506 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.219265] env[61852]: DEBUG oslo_vmware.api [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293175, 'name': PowerOnVM_Task, 'duration_secs': 0.583962} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.219721] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 933.220009] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ed797702-4f77-4b9f-b8ef-838afdb6f620 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Updating instance '51ecc9c3-a3fc-4bd7-8c90-003451700212' progress to 100 {{(pid=61852) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 933.257772] env[61852]: DEBUG oslo_vmware.api [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Task: {'id': task-1293177, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.274993] env[61852]: DEBUG nova.scheduler.client.report [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Updated inventory for provider f818062c-7b17-4bd0-94af-192a674543c3 with generation 105 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 933.276473] env[61852]: DEBUG nova.compute.provider_tree [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Updating resource provider f818062c-7b17-4bd0-94af-192a674543c3 generation from 105 to 106 during operation: update_inventory {{(pid=61852) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 933.276473] env[61852]: DEBUG nova.compute.provider_tree [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 933.564669] env[61852]: DEBUG oslo_vmware.api [None req-62280d4e-53e9-4175-aa5e-c7a2258b68d6 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293178, 'name': SuspendVM_Task} progress is 66%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.589197] env[61852]: DEBUG oslo_vmware.api [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5294d00b-97c3-12eb-aac2-f320ba2538b0, 'name': SearchDatastore_Task, 'duration_secs': 0.010534} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.591248] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bb928ad-76b9-49e0-b923-eaaafff27f1c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.594821] env[61852]: DEBUG nova.compute.manager [req-c97fa0e6-14a9-49d6-a7d4-18f44ef70599 req-5940220a-c70a-4d73-b7f0-d48574b34a03 service nova] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Received event network-vif-plugged-5b69df93-12bd-4374-9aa2-76e5c7e7ddb9 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 933.595131] env[61852]: DEBUG oslo_concurrency.lockutils [req-c97fa0e6-14a9-49d6-a7d4-18f44ef70599 req-5940220a-c70a-4d73-b7f0-d48574b34a03 service nova] Acquiring lock "ba863c60-444a-4959-8f8f-87b4952d2872-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 933.595498] env[61852]: DEBUG oslo_concurrency.lockutils [req-c97fa0e6-14a9-49d6-a7d4-18f44ef70599 req-5940220a-c70a-4d73-b7f0-d48574b34a03 service nova] Lock "ba863c60-444a-4959-8f8f-87b4952d2872-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 933.595938] env[61852]: DEBUG oslo_concurrency.lockutils [req-c97fa0e6-14a9-49d6-a7d4-18f44ef70599 req-5940220a-c70a-4d73-b7f0-d48574b34a03 service nova] Lock "ba863c60-444a-4959-8f8f-87b4952d2872-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 933.596156] env[61852]: DEBUG nova.compute.manager [req-c97fa0e6-14a9-49d6-a7d4-18f44ef70599 req-5940220a-c70a-4d73-b7f0-d48574b34a03 service nova] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] No waiting events found dispatching network-vif-plugged-5b69df93-12bd-4374-9aa2-76e5c7e7ddb9 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 933.597775] env[61852]: WARNING nova.compute.manager [req-c97fa0e6-14a9-49d6-a7d4-18f44ef70599 req-5940220a-c70a-4d73-b7f0-d48574b34a03 service nova] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Received unexpected event network-vif-plugged-5b69df93-12bd-4374-9aa2-76e5c7e7ddb9 for instance with vm_state building and task_state spawning. [ 933.600774] env[61852]: DEBUG oslo_vmware.api [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 933.600774] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52231fb1-7034-680d-f853-d0c0e6bd4a31" [ 933.600774] env[61852]: _type = "Task" [ 933.600774] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.609610] env[61852]: DEBUG oslo_vmware.api [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52231fb1-7034-680d-f853-d0c0e6bd4a31, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.678308] env[61852]: DEBUG nova.network.neutron [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Successfully updated port: 5b69df93-12bd-4374-9aa2-76e5c7e7ddb9 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 933.759031] env[61852]: DEBUG oslo_vmware.api [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Task: {'id': task-1293177, 'name': PowerOnVM_Task, 'duration_secs': 0.800319} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.761486] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 933.761486] env[61852]: INFO nova.compute.manager [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Took 9.51 seconds to spawn the instance on the hypervisor. [ 933.761486] env[61852]: DEBUG nova.compute.manager [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 933.761486] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfbe8196-856b-4b8a-889d-ace26f174e64 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.782195] env[61852]: DEBUG oslo_concurrency.lockutils [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.688s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 933.785808] env[61852]: DEBUG oslo_concurrency.lockutils [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.996s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 933.788350] env[61852]: INFO nova.compute.claims [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 933.815912] env[61852]: INFO nova.scheduler.client.report [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Deleted allocations for instance 254919cb-e3cd-4288-8696-95e632d78a38 [ 
934.064816] env[61852]: DEBUG oslo_vmware.api [None req-62280d4e-53e9-4175-aa5e-c7a2258b68d6 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293178, 'name': SuspendVM_Task, 'duration_secs': 0.881063} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.065171] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-62280d4e-53e9-4175-aa5e-c7a2258b68d6 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Suspended the VM {{(pid=61852) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 934.065440] env[61852]: DEBUG nova.compute.manager [None req-62280d4e-53e9-4175-aa5e-c7a2258b68d6 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 934.066284] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6899c4c-c698-41a2-b920-26fa4bf9b911 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.113458] env[61852]: DEBUG oslo_vmware.api [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52231fb1-7034-680d-f853-d0c0e6bd4a31, 'name': SearchDatastore_Task, 'duration_secs': 0.028444} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.113812] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 934.114136] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] d58958f2-7b6f-4480-9710-aa9e67ebd37c/d58958f2-7b6f-4480-9710-aa9e67ebd37c.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 934.114402] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-959c6580-aaae-4153-b301-03794d951925 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.122293] env[61852]: DEBUG oslo_vmware.api [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 934.122293] env[61852]: value = "task-1293179" [ 934.122293] env[61852]: _type = "Task" [ 934.122293] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.131312] env[61852]: DEBUG oslo_vmware.api [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293179, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.181285] env[61852]: DEBUG oslo_concurrency.lockutils [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "refresh_cache-ba863c60-444a-4959-8f8f-87b4952d2872" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 934.181499] env[61852]: DEBUG oslo_concurrency.lockutils [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquired lock "refresh_cache-ba863c60-444a-4959-8f8f-87b4952d2872" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.181584] env[61852]: DEBUG nova.network.neutron [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 934.279957] env[61852]: INFO nova.compute.manager [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Took 16.77 seconds to build instance. [ 934.325701] env[61852]: DEBUG oslo_concurrency.lockutils [None req-222dab93-229c-41e6-aceb-91c5aae3d934 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "254919cb-e3cd-4288-8696-95e632d78a38" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.062s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.635682] env[61852]: DEBUG oslo_vmware.api [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293179, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.719759] env[61852]: DEBUG nova.network.neutron [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 934.782155] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9e2736ab-75db-435c-9716-fc1d518b3810 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Lock "b99bacc1-21e7-4bbd-8092-549246500421" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.285s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.923447] env[61852]: DEBUG nova.network.neutron [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Updating instance_info_cache with network_info: [{"id": "5b69df93-12bd-4374-9aa2-76e5c7e7ddb9", "address": "fa:16:3e:50:91:09", "network": {"id": "5c538b43-cd66-41dd-b7f8-8d7f49060f2f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1279580713-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "783bc6968c91488293479f10b8dc92c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3e0aae3-33d1-403b-bfaf-306f77a1422e", "external-id": "nsx-vlan-transportzone-211", "segmentation_id": 211, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b69df93-12", "ovs_interfaceid": "5b69df93-12bd-4374-9aa2-76e5c7e7ddb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.983608] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c32363-e713-4af3-851d-3120290f5c17 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.993037] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-512ae128-a4b9-4a94-86a3-96c6f7b9339a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.032782] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bcea949-dddc-4d44-9fe3-03fa2ac04c70 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.041637] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62efccfa-3e6e-4a89-b2a9-7db9296fb4e1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.057224] env[61852]: DEBUG nova.compute.provider_tree [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 935.135739] env[61852]: DEBUG oslo_vmware.api [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293179, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.768152} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.136135] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] d58958f2-7b6f-4480-9710-aa9e67ebd37c/d58958f2-7b6f-4480-9710-aa9e67ebd37c.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 935.136272] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 935.136619] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a98bfa82-aff8-4f5f-ab10-64e2c0fc91ab {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.143576] env[61852]: DEBUG oslo_vmware.api [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 935.143576] env[61852]: value = "task-1293180" [ 935.143576] env[61852]: _type = "Task" [ 935.143576] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.154463] env[61852]: DEBUG oslo_vmware.api [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293180, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.426244] env[61852]: DEBUG oslo_concurrency.lockutils [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Releasing lock "refresh_cache-ba863c60-444a-4959-8f8f-87b4952d2872" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 935.426579] env[61852]: DEBUG nova.compute.manager [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Instance network_info: |[{"id": "5b69df93-12bd-4374-9aa2-76e5c7e7ddb9", "address": "fa:16:3e:50:91:09", "network": {"id": "5c538b43-cd66-41dd-b7f8-8d7f49060f2f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1279580713-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "783bc6968c91488293479f10b8dc92c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3e0aae3-33d1-403b-bfaf-306f77a1422e", "external-id": "nsx-vlan-transportzone-211", "segmentation_id": 211, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b69df93-12", "ovs_interfaceid": "5b69df93-12bd-4374-9aa2-76e5c7e7ddb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 935.427044] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:91:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c3e0aae3-33d1-403b-bfaf-306f77a1422e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5b69df93-12bd-4374-9aa2-76e5c7e7ddb9', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 935.435620] env[61852]: DEBUG oslo.service.loopingcall [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 935.435858] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 935.436107] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3d613458-8395-482f-ad8e-02a6d74490b8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.458310] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 935.458310] env[61852]: value = "task-1293181" [ 935.458310] env[61852]: _type = "Task" [ 935.458310] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.467667] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293181, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.479958] env[61852]: DEBUG nova.compute.manager [req-12035ba6-628d-4810-8a00-2264d1a10b0b req-afde05c7-a220-4e84-bf65-88bf0f030317 service nova] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Received event network-changed-2c88ebc1-acb3-41ba-8d38-8647755ce777 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 935.480096] env[61852]: DEBUG nova.compute.manager [req-12035ba6-628d-4810-8a00-2264d1a10b0b req-afde05c7-a220-4e84-bf65-88bf0f030317 service nova] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Refreshing instance network info cache due to event network-changed-2c88ebc1-acb3-41ba-8d38-8647755ce777. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 935.480341] env[61852]: DEBUG oslo_concurrency.lockutils [req-12035ba6-628d-4810-8a00-2264d1a10b0b req-afde05c7-a220-4e84-bf65-88bf0f030317 service nova] Acquiring lock "refresh_cache-b99bacc1-21e7-4bbd-8092-549246500421" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.480509] env[61852]: DEBUG oslo_concurrency.lockutils [req-12035ba6-628d-4810-8a00-2264d1a10b0b req-afde05c7-a220-4e84-bf65-88bf0f030317 service nova] Acquired lock "refresh_cache-b99bacc1-21e7-4bbd-8092-549246500421" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.480674] env[61852]: DEBUG nova.network.neutron [req-12035ba6-628d-4810-8a00-2264d1a10b0b req-afde05c7-a220-4e84-bf65-88bf0f030317 service nova] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Refreshing network info cache for port 2c88ebc1-acb3-41ba-8d38-8647755ce777 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 935.561938] env[61852]: DEBUG nova.scheduler.client.report [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 935.620636] env[61852]: DEBUG nova.compute.manager [req-46a762f0-409c-4be1-88bd-1fe68c085bf2 req-dfef9198-567d-4d84-ba33-88fdb2d5858d service nova] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Received event network-changed-5b69df93-12bd-4374-9aa2-76e5c7e7ddb9 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 935.621266] env[61852]: DEBUG nova.compute.manager [req-46a762f0-409c-4be1-88bd-1fe68c085bf2 req-dfef9198-567d-4d84-ba33-88fdb2d5858d service nova] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Refreshing instance network info cache due to event network-changed-5b69df93-12bd-4374-9aa2-76e5c7e7ddb9. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 935.621266] env[61852]: DEBUG oslo_concurrency.lockutils [req-46a762f0-409c-4be1-88bd-1fe68c085bf2 req-dfef9198-567d-4d84-ba33-88fdb2d5858d service nova] Acquiring lock "refresh_cache-ba863c60-444a-4959-8f8f-87b4952d2872" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.621266] env[61852]: DEBUG oslo_concurrency.lockutils [req-46a762f0-409c-4be1-88bd-1fe68c085bf2 req-dfef9198-567d-4d84-ba33-88fdb2d5858d service nova] Acquired lock "refresh_cache-ba863c60-444a-4959-8f8f-87b4952d2872" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.621466] env[61852]: DEBUG nova.network.neutron [req-46a762f0-409c-4be1-88bd-1fe68c085bf2 req-dfef9198-567d-4d84-ba33-88fdb2d5858d service nova] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Refreshing network info cache for port 5b69df93-12bd-4374-9aa2-76e5c7e7ddb9 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 935.653377] env[61852]: DEBUG oslo_vmware.api [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293180, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.098767} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.653667] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 935.654585] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a12be1f2-da94-4138-940b-554e0e3e17b4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.669331] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5cf7b8d2-aa93-461f-93f5-9e063dde08b6 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "51ecc9c3-a3fc-4bd7-8c90-003451700212" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.669659] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5cf7b8d2-aa93-461f-93f5-9e063dde08b6 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "51ecc9c3-a3fc-4bd7-8c90-003451700212" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.669874] env[61852]: DEBUG nova.compute.manager [None req-5cf7b8d2-aa93-461f-93f5-9e063dde08b6 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Going to confirm migration 1 {{(pid=61852) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 935.680290] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Reconfiguring VM instance instance-00000058 to attach disk [datastore2] d58958f2-7b6f-4480-9710-aa9e67ebd37c/d58958f2-7b6f-4480-9710-aa9e67ebd37c.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 935.680950] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67419342-172c-4f54-9ece-26853da195cc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.702692] env[61852]: DEBUG oslo_vmware.api [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 935.702692] env[61852]: value = "task-1293182" [ 935.702692] env[61852]: _type = "Task" [ 935.702692] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.712402] env[61852]: DEBUG oslo_vmware.api [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293182, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.820988] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5ef7846d-4b73-4704-8650-ca7f14fa7385 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "8bdb8059-3fb5-4f9c-bc73-b85bf8a23075" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.820988] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5ef7846d-4b73-4704-8650-ca7f14fa7385 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "8bdb8059-3fb5-4f9c-bc73-b85bf8a23075" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.821185] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5ef7846d-4b73-4704-8650-ca7f14fa7385 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "8bdb8059-3fb5-4f9c-bc73-b85bf8a23075-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.821373] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5ef7846d-4b73-4704-8650-ca7f14fa7385 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "8bdb8059-3fb5-4f9c-bc73-b85bf8a23075-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.821512] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5ef7846d-4b73-4704-8650-ca7f14fa7385 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "8bdb8059-3fb5-4f9c-bc73-b85bf8a23075-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.823762] env[61852]: INFO nova.compute.manager [None req-5ef7846d-4b73-4704-8650-ca7f14fa7385 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Terminating instance [ 935.826249] env[61852]: DEBUG nova.compute.manager [None req-5ef7846d-4b73-4704-8650-ca7f14fa7385 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 935.826887] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5ef7846d-4b73-4704-8650-ca7f14fa7385 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 935.827253] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d880cad7-561b-493d-8239-66784ec2bd51 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.835127] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5ef7846d-4b73-4704-8650-ca7f14fa7385 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 935.835437] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6990f766-2293-4550-9ec2-44191413d59c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.915276] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5ef7846d-4b73-4704-8650-ca7f14fa7385 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 935.916218] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5ef7846d-4b73-4704-8650-ca7f14fa7385 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Deleting contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 935.916218] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ef7846d-4b73-4704-8650-ca7f14fa7385 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Deleting the datastore file [datastore2] 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 935.916218] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-340c97c6-5b19-49ba-b2a7-e7db8962c6a2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.923184] env[61852]: DEBUG oslo_vmware.api [None req-5ef7846d-4b73-4704-8650-ca7f14fa7385 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 935.923184] env[61852]: value = "task-1293184" [ 935.923184] env[61852]: _type = "Task" [ 935.923184] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.934585] env[61852]: DEBUG oslo_vmware.api [None req-5ef7846d-4b73-4704-8650-ca7f14fa7385 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293184, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.968058] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293181, 'name': CreateVM_Task} progress is 99%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.072433] env[61852]: DEBUG oslo_concurrency.lockutils [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.287s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.072988] env[61852]: DEBUG nova.compute.manager [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 936.213108] env[61852]: DEBUG oslo_vmware.api [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293182, 'name': ReconfigVM_Task, 'duration_secs': 0.481454} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.213672] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Reconfigured VM instance instance-00000058 to attach disk [datastore2] d58958f2-7b6f-4480-9710-aa9e67ebd37c/d58958f2-7b6f-4480-9710-aa9e67ebd37c.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 936.214141] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0cc7c079-d2b1-4d3e-85ea-547a13b70224 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.221399] env[61852]: DEBUG oslo_vmware.api [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 936.221399] env[61852]: value = "task-1293185" [ 936.221399] env[61852]: _type = "Task" [ 936.221399] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.230537] env[61852]: DEBUG oslo_vmware.api [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293185, 'name': Rename_Task} progress is 5%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.246220] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5cf7b8d2-aa93-461f-93f5-9e063dde08b6 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "refresh_cache-51ecc9c3-a3fc-4bd7-8c90-003451700212" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 936.246509] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5cf7b8d2-aa93-461f-93f5-9e063dde08b6 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired lock "refresh_cache-51ecc9c3-a3fc-4bd7-8c90-003451700212" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.246838] env[61852]: DEBUG nova.network.neutron [None req-5cf7b8d2-aa93-461f-93f5-9e063dde08b6 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 936.246987] env[61852]: DEBUG nova.objects.instance [None req-5cf7b8d2-aa93-461f-93f5-9e063dde08b6 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lazy-loading 'info_cache' on Instance uuid 51ecc9c3-a3fc-4bd7-8c90-003451700212 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 936.434314] env[61852]: DEBUG oslo_vmware.api [None req-5ef7846d-4b73-4704-8650-ca7f14fa7385 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293184, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.277533} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.434683] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ef7846d-4b73-4704-8650-ca7f14fa7385 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 936.434781] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5ef7846d-4b73-4704-8650-ca7f14fa7385 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Deleted contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 936.434957] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5ef7846d-4b73-4704-8650-ca7f14fa7385 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 936.435155] env[61852]: INFO nova.compute.manager [None req-5ef7846d-4b73-4704-8650-ca7f14fa7385 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 936.435436] env[61852]: DEBUG oslo.service.loopingcall [None req-5ef7846d-4b73-4704-8650-ca7f14fa7385 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 936.435651] env[61852]: DEBUG nova.compute.manager [-] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 936.435766] env[61852]: DEBUG nova.network.neutron [-] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 936.470884] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293181, 'name': CreateVM_Task, 'duration_secs': 0.522756} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.471398] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 936.472196] env[61852]: DEBUG oslo_concurrency.lockutils [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 936.472542] env[61852]: DEBUG oslo_concurrency.lockutils [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.473157] env[61852]: DEBUG oslo_concurrency.lockutils [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 936.475461] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22a406d7-20e3-45eb-a180-7a003d686f50 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.479242] env[61852]: DEBUG oslo_vmware.api [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 936.479242] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]527f36dc-e3b9-8bf5-54ab-3ef8738e3cc2" [ 936.479242] env[61852]: _type = "Task" [ 936.479242] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.484861] env[61852]: DEBUG nova.network.neutron [req-12035ba6-628d-4810-8a00-2264d1a10b0b req-afde05c7-a220-4e84-bf65-88bf0f030317 service nova] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Updated VIF entry in instance network info cache for port 2c88ebc1-acb3-41ba-8d38-8647755ce777. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 936.485270] env[61852]: DEBUG nova.network.neutron [req-12035ba6-628d-4810-8a00-2264d1a10b0b req-afde05c7-a220-4e84-bf65-88bf0f030317 service nova] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Updating instance_info_cache with network_info: [{"id": "2c88ebc1-acb3-41ba-8d38-8647755ce777", "address": "fa:16:3e:38:78:4b", "network": {"id": "3625b90d-5a52-41f5-958d-5de80d933746", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-256880241-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.137", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "558cacb83ff34e9db608ade12bd52a5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c88ebc1-ac", "ovs_interfaceid": "2c88ebc1-acb3-41ba-8d38-8647755ce777", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.491716] env[61852]: DEBUG oslo_vmware.api [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]527f36dc-e3b9-8bf5-54ab-3ef8738e3cc2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.578089] env[61852]: DEBUG nova.compute.utils [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 936.579522] env[61852]: DEBUG nova.compute.manager [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 936.579689] env[61852]: DEBUG nova.network.neutron [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 936.584905] env[61852]: DEBUG nova.network.neutron [req-46a762f0-409c-4be1-88bd-1fe68c085bf2 req-dfef9198-567d-4d84-ba33-88fdb2d5858d service nova] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Updated VIF entry in instance network info cache for port 5b69df93-12bd-4374-9aa2-76e5c7e7ddb9. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 936.585070] env[61852]: DEBUG nova.network.neutron [req-46a762f0-409c-4be1-88bd-1fe68c085bf2 req-dfef9198-567d-4d84-ba33-88fdb2d5858d service nova] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Updating instance_info_cache with network_info: [{"id": "5b69df93-12bd-4374-9aa2-76e5c7e7ddb9", "address": "fa:16:3e:50:91:09", "network": {"id": "5c538b43-cd66-41dd-b7f8-8d7f49060f2f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1279580713-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "783bc6968c91488293479f10b8dc92c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3e0aae3-33d1-403b-bfaf-306f77a1422e", "external-id": "nsx-vlan-transportzone-211", "segmentation_id": 211, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b69df93-12", "ovs_interfaceid": "5b69df93-12bd-4374-9aa2-76e5c7e7ddb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.628068] env[61852]: DEBUG nova.policy [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd1349b8262e345068742af657fa8cbd0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4dbb543c66364861bf5f437c8c33a550', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 936.732443] env[61852]: DEBUG oslo_vmware.api [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293185, 'name': Rename_Task, 'duration_secs': 0.195867} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.732748] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 936.733041] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-34a91fc4-8e29-430b-ab2b-46c51f76a8e4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.740808] env[61852]: DEBUG oslo_vmware.api [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 936.740808] env[61852]: value = "task-1293186" [ 936.740808] env[61852]: _type = "Task" [ 936.740808] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.754200] env[61852]: DEBUG oslo_vmware.api [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293186, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.893432] env[61852]: DEBUG nova.network.neutron [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Successfully created port: 983b26a7-ea32-4616-8527-24b24ecdb040 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 936.988933] env[61852]: DEBUG oslo_concurrency.lockutils [req-12035ba6-628d-4810-8a00-2264d1a10b0b req-afde05c7-a220-4e84-bf65-88bf0f030317 service nova] Releasing lock "refresh_cache-b99bacc1-21e7-4bbd-8092-549246500421" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.989407] env[61852]: DEBUG oslo_vmware.api [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]527f36dc-e3b9-8bf5-54ab-3ef8738e3cc2, 'name': SearchDatastore_Task, 'duration_secs': 0.017556} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.989691] env[61852]: DEBUG oslo_concurrency.lockutils [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.989981] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 936.990248] env[61852]: DEBUG oslo_concurrency.lockutils [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 936.990403] env[61852]: DEBUG oslo_concurrency.lockutils [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.990592] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 936.990860] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e22fcede-9313-4063-b483-f41c9d024187 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.999673] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 936.999904] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 937.000668] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6bb12a4-a3b1-4593-897f-5e81568c73ec {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.005884] env[61852]: DEBUG oslo_vmware.api [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 937.005884] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529dea24-9a46-6e53-e890-11871c43c558" [ 937.005884] env[61852]: _type = "Task" [ 937.005884] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.013873] env[61852]: DEBUG oslo_vmware.api [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529dea24-9a46-6e53-e890-11871c43c558, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.089226] env[61852]: DEBUG nova.compute.manager [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 937.092475] env[61852]: DEBUG oslo_concurrency.lockutils [req-46a762f0-409c-4be1-88bd-1fe68c085bf2 req-dfef9198-567d-4d84-ba33-88fdb2d5858d service nova] Releasing lock "refresh_cache-ba863c60-444a-4959-8f8f-87b4952d2872" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.253170] env[61852]: DEBUG oslo_vmware.api [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293186, 'name': PowerOnVM_Task} progress is 81%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.258702] env[61852]: DEBUG nova.network.neutron [-] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.517097] env[61852]: DEBUG oslo_vmware.api [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529dea24-9a46-6e53-e890-11871c43c558, 'name': SearchDatastore_Task, 'duration_secs': 0.022126} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.518014] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6da192a-e857-419c-9ab8-fe9cf598caed {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.523860] env[61852]: DEBUG oslo_vmware.api [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 937.523860] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52eb40a1-2a75-1a65-56d4-df19648e5d1e" [ 937.523860] env[61852]: _type = "Task" [ 937.523860] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.533155] env[61852]: DEBUG oslo_vmware.api [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52eb40a1-2a75-1a65-56d4-df19648e5d1e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.548570] env[61852]: DEBUG nova.compute.manager [req-a18b2bf5-b426-4140-9fcd-f831d90d39fb req-3e9ca38e-4137-4490-82e1-04ec8a4fcdaf service nova] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Received event network-vif-deleted-3e0cb15e-f2d1-47c8-975c-dd685e0ad664 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 937.593139] env[61852]: DEBUG nova.network.neutron [None req-5cf7b8d2-aa93-461f-93f5-9e063dde08b6 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Updating instance_info_cache with network_info: [{"id": "61e94b93-d030-4c70-8ffc-ce81cbf29d01", "address": "fa:16:3e:23:a2:0c", "network": {"id": "37c975fc-d484-4e07-82b4-dc10db3dab61", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2132613748-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14a017ea2b084ae0ad2994dda7809c7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61e94b93-d0", "ovs_interfaceid": "61e94b93-d030-4c70-8ffc-ce81cbf29d01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.750966] env[61852]: DEBUG oslo_vmware.api [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293186, 'name': PowerOnVM_Task, 'duration_secs': 0.865953} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.751270] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 937.751483] env[61852]: INFO nova.compute.manager [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Took 8.68 seconds to spawn the instance on the hypervisor. [ 937.751692] env[61852]: DEBUG nova.compute.manager [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 937.752477] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc66cbba-9cb5-4895-a698-78b76f1044e0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.761498] env[61852]: INFO nova.compute.manager [-] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Took 1.33 seconds to deallocate network for instance. [ 938.034065] env[61852]: DEBUG oslo_vmware.api [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52eb40a1-2a75-1a65-56d4-df19648e5d1e, 'name': SearchDatastore_Task, 'duration_secs': 0.017092} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.034358] env[61852]: DEBUG oslo_concurrency.lockutils [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.034622] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] ba863c60-444a-4959-8f8f-87b4952d2872/ba863c60-444a-4959-8f8f-87b4952d2872.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 938.034884] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-97af11db-2c07-4632-abf9-dbc6a36d3c0c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.041233] env[61852]: DEBUG oslo_vmware.api [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 938.041233] env[61852]: value = "task-1293187" [ 938.041233] env[61852]: _type = "Task" [ 938.041233] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.048528] env[61852]: DEBUG oslo_vmware.api [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293187, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.098297] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5cf7b8d2-aa93-461f-93f5-9e063dde08b6 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Releasing lock "refresh_cache-51ecc9c3-a3fc-4bd7-8c90-003451700212" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.098591] env[61852]: DEBUG nova.objects.instance [None req-5cf7b8d2-aa93-461f-93f5-9e063dde08b6 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lazy-loading 'migration_context' on Instance uuid 51ecc9c3-a3fc-4bd7-8c90-003451700212 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 938.101132] env[61852]: DEBUG nova.compute.manager [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 938.128130] env[61852]: DEBUG nova.virt.hardware [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 938.128435] env[61852]: DEBUG nova.virt.hardware [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 938.128610] env[61852]: DEBUG nova.virt.hardware [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 938.128799] env[61852]: DEBUG nova.virt.hardware [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 938.128952] env[61852]: DEBUG nova.virt.hardware 
[None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 938.129120] env[61852]: DEBUG nova.virt.hardware [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 938.129346] env[61852]: DEBUG nova.virt.hardware [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 938.129523] env[61852]: DEBUG nova.virt.hardware [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 938.129704] env[61852]: DEBUG nova.virt.hardware [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 938.129887] env[61852]: DEBUG nova.virt.hardware [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 938.130070] env[61852]: DEBUG nova.virt.hardware [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 938.130940] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4185c0f-f9ce-45de-88b3-8be0b4fd69d5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.139104] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ce253e-8d90-4d11-aae5-0c69de0ada81 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.277014] env[61852]: INFO nova.compute.manager [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Took 16.59 seconds to build instance. 
[ 938.282620] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5ef7846d-4b73-4704-8650-ca7f14fa7385 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 938.283394] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5ef7846d-4b73-4704-8650-ca7f14fa7385 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 938.283638] env[61852]: DEBUG nova.objects.instance [None req-5ef7846d-4b73-4704-8650-ca7f14fa7385 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lazy-loading 'resources' on Instance uuid 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 938.381260] env[61852]: DEBUG nova.compute.manager [req-866fbfee-d2cf-4690-aa13-b31214097167 req-a3c8fcae-f8df-456f-943c-925bfa5d8edd service nova] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Received event network-vif-plugged-983b26a7-ea32-4616-8527-24b24ecdb040 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 938.381571] env[61852]: DEBUG oslo_concurrency.lockutils [req-866fbfee-d2cf-4690-aa13-b31214097167 req-a3c8fcae-f8df-456f-943c-925bfa5d8edd service nova] Acquiring lock "561d33d0-cad5-48ae-bd32-5de2220c5283-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 938.381800] env[61852]: DEBUG oslo_concurrency.lockutils [req-866fbfee-d2cf-4690-aa13-b31214097167 req-a3c8fcae-f8df-456f-943c-925bfa5d8edd service nova] Lock "561d33d0-cad5-48ae-bd32-5de2220c5283-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 938.381985] env[61852]: DEBUG oslo_concurrency.lockutils [req-866fbfee-d2cf-4690-aa13-b31214097167 req-a3c8fcae-f8df-456f-943c-925bfa5d8edd service nova] Lock "561d33d0-cad5-48ae-bd32-5de2220c5283-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 938.382421] env[61852]: DEBUG nova.compute.manager [req-866fbfee-d2cf-4690-aa13-b31214097167 req-a3c8fcae-f8df-456f-943c-925bfa5d8edd service nova] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] No waiting events found dispatching network-vif-plugged-983b26a7-ea32-4616-8527-24b24ecdb040 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 938.382903] env[61852]: WARNING nova.compute.manager [req-866fbfee-d2cf-4690-aa13-b31214097167 req-a3c8fcae-f8df-456f-943c-925bfa5d8edd service nova] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Received unexpected event network-vif-plugged-983b26a7-ea32-4616-8527-24b24ecdb040 for instance with vm_state building and task_state spawning. 
[ 938.553204] env[61852]: DEBUG oslo_vmware.api [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293187, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.589674] env[61852]: DEBUG nova.network.neutron [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Successfully updated port: 983b26a7-ea32-4616-8527-24b24ecdb040 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 938.603900] env[61852]: DEBUG nova.objects.base [None req-5cf7b8d2-aa93-461f-93f5-9e063dde08b6 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Object Instance<51ecc9c3-a3fc-4bd7-8c90-003451700212> lazy-loaded attributes: info_cache,migration_context {{(pid=61852) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 938.607148] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2836accb-8043-41f4-8259-5f410016422d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.632386] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc37ccec-dcde-44ef-a624-4d07ae825ca5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.638756] env[61852]: DEBUG oslo_vmware.api [None req-5cf7b8d2-aa93-461f-93f5-9e063dde08b6 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 938.638756] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52735c3e-86a5-5af8-15a8-b733f588bcd0" [ 938.638756] env[61852]: _type = "Task" [ 938.638756] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.651228] env[61852]: DEBUG oslo_vmware.api [None req-5cf7b8d2-aa93-461f-93f5-9e063dde08b6 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52735c3e-86a5-5af8-15a8-b733f588bcd0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.789316] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ae391d66-db30-4346-ad8a-c261362136ca tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "d58958f2-7b6f-4480-9710-aa9e67ebd37c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 18.108s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 938.936622] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bcbbd8b-5e53-4a14-b502-92d96bf8f626 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.944408] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f03dddb-e2ff-48b4-aad4-960329ab9e92 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.974782] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a9687a2-a306-44fa-bdf4-78dbd22d4168 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.982757] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-666e20e1-797d-4acf-9e95-3fed208e8cbc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.998809] env[61852]: DEBUG nova.compute.provider_tree [None req-5ef7846d-4b73-4704-8650-ca7f14fa7385 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 939.051526] env[61852]: DEBUG oslo_vmware.api [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293187, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.702855} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.051820] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] ba863c60-444a-4959-8f8f-87b4952d2872/ba863c60-444a-4959-8f8f-87b4952d2872.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 939.052083] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 939.052357] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-68824ce4-2ddb-4dc8-96db-e4e3c1979b64 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.059739] env[61852]: DEBUG oslo_vmware.api [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 939.059739] env[61852]: value = "task-1293188" [ 939.059739] env[61852]: _type = "Task" [ 939.059739] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.067787] env[61852]: DEBUG oslo_vmware.api [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293188, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.093231] env[61852]: DEBUG oslo_concurrency.lockutils [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "refresh_cache-561d33d0-cad5-48ae-bd32-5de2220c5283" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 939.093469] env[61852]: DEBUG oslo_concurrency.lockutils [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquired lock "refresh_cache-561d33d0-cad5-48ae-bd32-5de2220c5283" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.093963] env[61852]: DEBUG nova.network.neutron [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 939.152543] env[61852]: DEBUG oslo_vmware.api [None req-5cf7b8d2-aa93-461f-93f5-9e063dde08b6 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52735c3e-86a5-5af8-15a8-b733f588bcd0, 'name': SearchDatastore_Task, 'duration_secs': 0.037477} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.152928] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5cf7b8d2-aa93-461f-93f5-9e063dde08b6 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.531692] env[61852]: DEBUG nova.scheduler.client.report [None req-5ef7846d-4b73-4704-8650-ca7f14fa7385 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Updated inventory for provider f818062c-7b17-4bd0-94af-192a674543c3 with generation 106 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 939.532017] env[61852]: DEBUG nova.compute.provider_tree [None req-5ef7846d-4b73-4704-8650-ca7f14fa7385 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Updating resource provider f818062c-7b17-4bd0-94af-192a674543c3 generation from 106 to 107 during operation: update_inventory {{(pid=61852) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 939.532382] env[61852]: DEBUG nova.compute.provider_tree [None req-5ef7846d-4b73-4704-8650-ca7f14fa7385 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 939.569536] env[61852]: DEBUG oslo_vmware.api [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293188, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070656} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.569824] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 939.570634] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19183ee2-e454-45ed-8b7a-c6793c9757cb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.593165] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] ba863c60-444a-4959-8f8f-87b4952d2872/ba863c60-444a-4959-8f8f-87b4952d2872.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 939.593520] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e7f9150-d4c9-457e-9785-344a1baf62b2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.614543] env[61852]: DEBUG oslo_vmware.api [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 939.614543] env[61852]: value = "task-1293189" [ 939.614543] env[61852]: _type = "Task" [ 939.614543] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.623621] env[61852]: DEBUG oslo_vmware.api [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293189, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.638913] env[61852]: DEBUG nova.network.neutron [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 939.732263] env[61852]: DEBUG oslo_vmware.rw_handles [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5267b81d-cd58-a4ba-da6e-f5ee6cf683cd/disk-0.vmdk. 
{{(pid=61852) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 939.733122] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e29e42e6-d0f5-41a3-9b78-ffba6221a3f4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.739616] env[61852]: DEBUG oslo_vmware.rw_handles [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5267b81d-cd58-a4ba-da6e-f5ee6cf683cd/disk-0.vmdk is in state: ready. {{(pid=61852) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 939.739788] env[61852]: ERROR oslo_vmware.rw_handles [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5267b81d-cd58-a4ba-da6e-f5ee6cf683cd/disk-0.vmdk due to incomplete transfer. [ 939.740041] env[61852]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e8b45462-c9b7-4f8e-8ef7-888046b37b73 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.748587] env[61852]: DEBUG oslo_vmware.rw_handles [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5267b81d-cd58-a4ba-da6e-f5ee6cf683cd/disk-0.vmdk. {{(pid=61852) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 939.748792] env[61852]: DEBUG nova.virt.vmwareapi.images [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Uploaded image f5e5a587-44f8-4b6c-b924-cca27583fcf9 to the Glance image server {{(pid=61852) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 939.751052] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Destroying the VM {{(pid=61852) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 939.751313] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-6d83ddaf-91da-48c7-ae9b-2202f5d32be4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.757285] env[61852]: DEBUG oslo_vmware.api [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){ [ 939.757285] env[61852]: value = "task-1293190" [ 939.757285] env[61852]: _type = "Task" [ 939.757285] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.765017] env[61852]: DEBUG oslo_vmware.api [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293190, 'name': Destroy_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.782011] env[61852]: DEBUG nova.network.neutron [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Updating instance_info_cache with network_info: [{"id": "983b26a7-ea32-4616-8527-24b24ecdb040", "address": "fa:16:3e:6b:d4:e2", "network": {"id": "66e1ee36-559a-4219-ab11-b6c5d9aeb20e", "bridge": "br-int", "label": "tempest-ServersTestJSON-206536995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4dbb543c66364861bf5f437c8c33a550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2c424c9-6446-4b2a-af8c-4d9c29117c39", "external-id": "nsx-vlan-transportzone-437", "segmentation_id": 437, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap983b26a7-ea", "ovs_interfaceid": "983b26a7-ea32-4616-8527-24b24ecdb040", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.038574] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5ef7846d-4b73-4704-8650-ca7f14fa7385 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.755s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.041640] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5cf7b8d2-aa93-461f-93f5-9e063dde08b6 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.888s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.071975] env[61852]: INFO nova.scheduler.client.report [None req-5ef7846d-4b73-4704-8650-ca7f14fa7385 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Deleted allocations for instance 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075 [ 940.125369] env[61852]: DEBUG oslo_vmware.api [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293189, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.274032] env[61852]: DEBUG oslo_vmware.api [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293190, 'name': Destroy_Task, 'duration_secs': 0.361817} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.274032] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Destroyed the VM [ 940.274032] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Deleting Snapshot of the VM instance {{(pid=61852) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 940.274705] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-7a57fbc7-34f8-45b3-964d-df6da6213bd4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.284039] env[61852]: DEBUG oslo_vmware.api [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){ [ 940.284039] env[61852]: value = "task-1293191" [ 940.284039] env[61852]: _type = "Task" [ 940.284039] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.287706] env[61852]: DEBUG oslo_concurrency.lockutils [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Releasing lock "refresh_cache-561d33d0-cad5-48ae-bd32-5de2220c5283" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 940.288336] env[61852]: DEBUG nova.compute.manager [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Instance network_info: |[{"id": "983b26a7-ea32-4616-8527-24b24ecdb040", "address": "fa:16:3e:6b:d4:e2", "network": {"id": "66e1ee36-559a-4219-ab11-b6c5d9aeb20e", "bridge": "br-int", "label": "tempest-ServersTestJSON-206536995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4dbb543c66364861bf5f437c8c33a550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2c424c9-6446-4b2a-af8c-4d9c29117c39", "external-id": "nsx-vlan-transportzone-437", "segmentation_id": 437, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap983b26a7-ea", "ovs_interfaceid": "983b26a7-ea32-4616-8527-24b24ecdb040", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 940.289102] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6b:d4:e2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f2c424c9-6446-4b2a-af8c-4d9c29117c39', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '983b26a7-ea32-4616-8527-24b24ecdb040', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 940.306033] env[61852]: DEBUG oslo.service.loopingcall [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 940.306033] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 940.306033] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8e310f07-1099-4e61-ae4e-fce339cd1b9a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.337627] env[61852]: DEBUG oslo_vmware.api [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293191, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.346021] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 940.346021] env[61852]: value = "task-1293192" [ 940.346021] env[61852]: _type = "Task" [ 940.346021] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.357497] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293192, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.418263] env[61852]: DEBUG nova.compute.manager [req-e570f6b2-b6bf-475b-8573-0b757ab49854 req-2219f008-5cae-44b8-9c65-41f3a45bf2d4 service nova] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Received event network-changed-983b26a7-ea32-4616-8527-24b24ecdb040 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 940.418559] env[61852]: DEBUG nova.compute.manager [req-e570f6b2-b6bf-475b-8573-0b757ab49854 req-2219f008-5cae-44b8-9c65-41f3a45bf2d4 service nova] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Refreshing instance network info cache due to event network-changed-983b26a7-ea32-4616-8527-24b24ecdb040. 
[ 940.418924] env[61852]: DEBUG oslo_concurrency.lockutils [req-e570f6b2-b6bf-475b-8573-0b757ab49854 req-2219f008-5cae-44b8-9c65-41f3a45bf2d4 service nova] Acquiring lock "refresh_cache-561d33d0-cad5-48ae-bd32-5de2220c5283" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 940.419157] env[61852]: DEBUG oslo_concurrency.lockutils [req-e570f6b2-b6bf-475b-8573-0b757ab49854 req-2219f008-5cae-44b8-9c65-41f3a45bf2d4 service nova] Acquired lock "refresh_cache-561d33d0-cad5-48ae-bd32-5de2220c5283" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 940.419403] env[61852]: DEBUG nova.network.neutron [req-e570f6b2-b6bf-475b-8573-0b757ab49854 req-2219f008-5cae-44b8-9c65-41f3a45bf2d4 service nova] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Refreshing network info cache for port 983b26a7-ea32-4616-8527-24b24ecdb040 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 940.503513] env[61852]: DEBUG nova.compute.manager [req-37ccf60b-4088-4ff1-b1e0-df9419a93684 req-181da911-229f-4580-9145-23fa5312be1c service nova] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Received event network-changed-fb4d01a4-4b0f-4591-aaf9-f8487c4cd460 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 940.503737] env[61852]: DEBUG nova.compute.manager [req-37ccf60b-4088-4ff1-b1e0-df9419a93684 req-181da911-229f-4580-9145-23fa5312be1c service nova] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Refreshing instance network info cache due to event network-changed-fb4d01a4-4b0f-4591-aaf9-f8487c4cd460. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 940.504019] env[61852]: DEBUG oslo_concurrency.lockutils [req-37ccf60b-4088-4ff1-b1e0-df9419a93684 req-181da911-229f-4580-9145-23fa5312be1c service nova] Acquiring lock "refresh_cache-d58958f2-7b6f-4480-9710-aa9e67ebd37c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 940.504426] env[61852]: DEBUG oslo_concurrency.lockutils [req-37ccf60b-4088-4ff1-b1e0-df9419a93684 req-181da911-229f-4580-9145-23fa5312be1c service nova] Acquired lock "refresh_cache-d58958f2-7b6f-4480-9710-aa9e67ebd37c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 940.504605] env[61852]: DEBUG nova.network.neutron [req-37ccf60b-4088-4ff1-b1e0-df9419a93684 req-181da911-229f-4580-9145-23fa5312be1c service nova] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Refreshing network info cache for port fb4d01a4-4b0f-4591-aaf9-f8487c4cd460 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 940.583223] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5ef7846d-4b73-4704-8650-ca7f14fa7385 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "8bdb8059-3fb5-4f9c-bc73-b85bf8a23075" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.762s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 940.627510] env[61852]: DEBUG oslo_vmware.api [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293189, 'name': ReconfigVM_Task, 'duration_secs': 0.761387} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
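The Acquiring/Acquired/Releasing triplets are `oslo_concurrency.lockutils` at work: one named lock per instance (`refresh_cache-<uuid>`) serializes network-info cache refreshes so concurrent external events cannot interleave. The same pattern in miniature (the refresh callable is a placeholder):

```python
from oslo_concurrency import lockutils

# Same pattern as the log: a named lock scoped to the instance UUID guards
# the network-info cache refresh. lockutils logs Acquiring/Acquired on
# entry and Releasing on exit, along with waited/held timings.
def refresh_instance_cache(instance_uuid, do_refresh):
    with lockutils.lock(f"refresh_cache-{instance_uuid}"):
        do_refresh(instance_uuid)
```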
[ 940.627807] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Reconfigured VM instance instance-00000059 to attach disk [datastore1] ba863c60-444a-4959-8f8f-87b4952d2872/ba863c60-444a-4959-8f8f-87b4952d2872.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 940.630971] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bf64afda-cb77-49e9-ad70-6a828ab75d35 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 940.638267] env[61852]: DEBUG oslo_vmware.api [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){
[ 940.638267] env[61852]: value = "task-1293193"
[ 940.638267] env[61852]: _type = "Task"
[ 940.638267] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 940.648679] env[61852]: DEBUG oslo_vmware.api [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293193, 'name': Rename_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 940.679840] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37936cb8-4023-4d09-842c-31b92d220de7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 940.688266] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5235bd90-4648-4def-908a-4e38fa97a22d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 940.718813] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f7608fe-9e55-46d5-872e-1eedc9284757 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 940.726247] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f802969-d76a-4497-9c00-a4cb25c5b71c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 940.740990] env[61852]: DEBUG nova.compute.provider_tree [None req-5cf7b8d2-aa93-461f-93f5-9e063dde08b6 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 940.792440] env[61852]: DEBUG oslo_vmware.api [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293191, 'name': RemoveSnapshot_Task, 'duration_secs': 0.401037} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 940.792780] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Deleted Snapshot of the VM instance {{(pid=61852) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}}
[ 940.793204] env[61852]: DEBUG nova.compute.manager [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 940.794146] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68656634-8a5d-47a7-9cf3-974a6ebcbace {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 940.853707] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293192, 'name': CreateVM_Task, 'duration_secs': 0.35624} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 940.853927] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 940.854601] env[61852]: DEBUG oslo_concurrency.lockutils [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 940.854784] env[61852]: DEBUG oslo_concurrency.lockutils [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 940.855118] env[61852]: DEBUG oslo_concurrency.lockutils [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 940.855367] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f3b2053-e3ad-4c15-b65a-e7f0ea7364d5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 940.860296] env[61852]: DEBUG oslo_vmware.api [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){
[ 940.860296] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]524177a1-55f1-7f77-bfa8-95ee4124f854"
[ 940.860296] env[61852]: _type = "Task"
[ 940.860296] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 940.867842] env[61852]: DEBUG oslo_vmware.api [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]524177a1-55f1-7f77-bfa8-95ee4124f854, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 941.134349] env[61852]: DEBUG nova.network.neutron [req-e570f6b2-b6bf-475b-8573-0b757ab49854 req-2219f008-5cae-44b8-9c65-41f3a45bf2d4 service nova] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Updated VIF entry in instance network info cache for port 983b26a7-ea32-4616-8527-24b24ecdb040. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 941.134744] env[61852]: DEBUG nova.network.neutron [req-e570f6b2-b6bf-475b-8573-0b757ab49854 req-2219f008-5cae-44b8-9c65-41f3a45bf2d4 service nova] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Updating instance_info_cache with network_info: [{"id": "983b26a7-ea32-4616-8527-24b24ecdb040", "address": "fa:16:3e:6b:d4:e2", "network": {"id": "66e1ee36-559a-4219-ab11-b6c5d9aeb20e", "bridge": "br-int", "label": "tempest-ServersTestJSON-206536995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4dbb543c66364861bf5f437c8c33a550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2c424c9-6446-4b2a-af8c-4d9c29117c39", "external-id": "nsx-vlan-transportzone-437", "segmentation_id": 437, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap983b26a7-ea", "ovs_interfaceid": "983b26a7-ea32-4616-8527-24b24ecdb040", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 941.149962] env[61852]: DEBUG oslo_vmware.api [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293193, 'name': Rename_Task, 'duration_secs': 0.335939} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 941.150374] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 941.150635] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb600bb9-d96a-4a1d-8b33-f02faa31fb53 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 941.163286] env[61852]: DEBUG oslo_vmware.api [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){
[ 941.163286] env[61852]: value = "task-1293194"
[ 941.163286] env[61852]: _type = "Task"
[ 941.163286] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 941.179483] env[61852]: DEBUG oslo_vmware.api [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293194, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 941.244168] env[61852]: DEBUG nova.scheduler.client.report [None req-5cf7b8d2-aa93-461f-93f5-9e063dde08b6 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 941.307569] env[61852]: INFO nova.compute.manager [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Shelve offloading
[ 941.309383] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 941.310221] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-31b32b00-60dc-46a1-a808-267291259199 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 941.316572] env[61852]: DEBUG oslo_vmware.api [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){
[ 941.316572] env[61852]: value = "task-1293195"
[ 941.316572] env[61852]: _type = "Task"
[ 941.316572] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
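The `set_inventory_for_provider` entry spells out what this node reports to Placement. Effective schedulable capacity per resource class is `total * allocation_ratio - reserved`; a worked check against the logged inventory (fields trimmed to the ones used here):

```python
# Inventory values copied from the log for provider f818062c; min_unit,
# max_unit and step_size are omitted for brevity.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}

def capacity(inv):
    # Placement's effective capacity: total * allocation_ratio - reserved.
    return {rc: int(v["total"] * v["allocation_ratio"]) - v["reserved"]
            for rc, v in inv.items()}

# capacity(inventory) == {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}
```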
[ 941.329221] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] VM already powered off {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}}
[ 941.329425] env[61852]: DEBUG nova.compute.manager [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 941.330214] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7fe2eeb-1766-4a52-97b4-2c9afc87f516 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 941.336270] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquiring lock "refresh_cache-8d8679db-eb9d-45c1-b053-70378f58e273" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 941.336478] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquired lock "refresh_cache-8d8679db-eb9d-45c1-b053-70378f58e273" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 941.336626] env[61852]: DEBUG nova.network.neutron [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 941.372447] env[61852]: DEBUG oslo_vmware.api [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]524177a1-55f1-7f77-bfa8-95ee4124f854, 'name': SearchDatastore_Task, 'duration_secs': 0.034174} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
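The shelve path logs `VM already powered off` rather than failing because the power-off helper treats vCenter's InvalidPowerState fault as benign. A hedged sketch of that tolerance, assuming an established `oslo_vmware.api.VMwareAPISession` (`session`) and a VM managed-object reference (`vm_ref`); the exception class is oslo.vmware's translation of that fault:

```python
from oslo_vmware import exceptions as vexc

# Sketch: power off, but tolerate the VM already being off, mirroring the
# "VM already powered off" line above. session and vm_ref are assumed inputs.
def power_off_instance(session, vm_ref):
    try:
        task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
        session.wait_for_task(task)
    except vexc.InvalidPowerStateException:
        # vCenter raises InvalidPowerState when the VM is not powered on.
        pass
```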
[ 941.372914] env[61852]: DEBUG oslo_concurrency.lockutils [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 941.373189] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 941.373507] env[61852]: DEBUG oslo_concurrency.lockutils [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 941.373670] env[61852]: DEBUG oslo_concurrency.lockutils [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 941.373900] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 941.374252] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-74b72c3d-2cd6-462a-af53-0c6051430079 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 941.383565] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 941.383776] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
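`Creating directory` / `Created directory` above is `ds_util.mkdir`, a thin wrapper over the vSphere `FileManager.MakeDirectory` call. A minimal sketch through oslo.vmware's `invoke_api` (the datacenter reference is a placeholder; `createParentDirectories` makes the call safe for nested cache paths):

```python
# Sketch of the MakeDirectory call behind ds_util.mkdir. ds_path is a
# datastore path string like "[datastore1] devstack-image-cache_base";
# dc_ref is an assumed datacenter managed-object reference.
def mkdir(session, ds_path, dc_ref):
    file_manager = session.vim.service_content.fileManager
    session.invoke_api(session.vim, "MakeDirectory", file_manager,
                       name=ds_path, datacenter=dc_ref,
                       createParentDirectories=True)
```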
[ 941.384647] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19195fb7-f52f-4041-be40-a7509aa0ccc8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 941.390970] env[61852]: DEBUG oslo_vmware.api [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){
[ 941.390970] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529fc649-9df0-b935-4c8f-0f5c5d0a01ab"
[ 941.390970] env[61852]: _type = "Task"
[ 941.390970] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 941.399832] env[61852]: DEBUG oslo_vmware.api [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529fc649-9df0-b935-4c8f-0f5c5d0a01ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 941.412394] env[61852]: DEBUG nova.network.neutron [req-37ccf60b-4088-4ff1-b1e0-df9419a93684 req-181da911-229f-4580-9145-23fa5312be1c service nova] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Updated VIF entry in instance network info cache for port fb4d01a4-4b0f-4591-aaf9-f8487c4cd460. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 941.412889] env[61852]: DEBUG nova.network.neutron [req-37ccf60b-4088-4ff1-b1e0-df9419a93684 req-181da911-229f-4580-9145-23fa5312be1c service nova] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Updating instance_info_cache with network_info: [{"id": "fb4d01a4-4b0f-4591-aaf9-f8487c4cd460", "address": "fa:16:3e:fe:46:c7", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb4d01a4-4b", "ovs_interfaceid": "fb4d01a4-4b0f-4591-aaf9-f8487c4cd460", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 941.637608] env[61852]: DEBUG oslo_concurrency.lockutils [req-e570f6b2-b6bf-475b-8573-0b757ab49854 req-2219f008-5cae-44b8-9c65-41f3a45bf2d4 service nova] Releasing lock "refresh_cache-561d33d0-cad5-48ae-bd32-5de2220c5283" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 941.637886] env[61852]: DEBUG nova.compute.manager [req-e570f6b2-b6bf-475b-8573-0b757ab49854 req-2219f008-5cae-44b8-9c65-41f3a45bf2d4 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Received event network-changed-6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 941.638507] env[61852]: DEBUG nova.compute.manager [req-e570f6b2-b6bf-475b-8573-0b757ab49854 req-2219f008-5cae-44b8-9c65-41f3a45bf2d4 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Refreshing instance network info cache due to event network-changed-6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 941.638507] env[61852]: DEBUG oslo_concurrency.lockutils [req-e570f6b2-b6bf-475b-8573-0b757ab49854 req-2219f008-5cae-44b8-9c65-41f3a45bf2d4 service nova] Acquiring lock "refresh_cache-4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 941.638507] env[61852]: DEBUG oslo_concurrency.lockutils [req-e570f6b2-b6bf-475b-8573-0b757ab49854 req-2219f008-5cae-44b8-9c65-41f3a45bf2d4 service nova] Acquired lock "refresh_cache-4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 941.638707] env[61852]: DEBUG nova.network.neutron [req-e570f6b2-b6bf-475b-8573-0b757ab49854 req-2219f008-5cae-44b8-9c65-41f3a45bf2d4 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Refreshing network info cache for port 6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 941.674986] env[61852]: DEBUG oslo_vmware.api [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293194, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 941.902099] env[61852]: DEBUG oslo_vmware.api [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529fc649-9df0-b935-4c8f-0f5c5d0a01ab, 'name': SearchDatastore_Task, 'duration_secs': 0.013176} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 941.902949] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69af682c-8e78-47d9-81f8-8dc478ec7c5a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 941.908132] env[61852]: DEBUG oslo_vmware.api [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){
[ 941.908132] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]522e5786-1ab1-7385-9569-5493619ba37a"
[ 941.908132] env[61852]: _type = "Task"
[ 941.908132] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 941.916267] env[61852]: DEBUG oslo_concurrency.lockutils [req-37ccf60b-4088-4ff1-b1e0-df9419a93684 req-181da911-229f-4580-9145-23fa5312be1c service nova] Releasing lock "refresh_cache-d58958f2-7b6f-4480-9710-aa9e67ebd37c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 941.916709] env[61852]: DEBUG oslo_vmware.api [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]522e5786-1ab1-7385-9569-5493619ba37a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 942.047790] env[61852]: DEBUG nova.network.neutron [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Updating instance_info_cache with network_info: [{"id": "83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b", "address": "fa:16:3e:82:81:71", "network": {"id": "d8dfb48f-1d4c-40ca-a2c0-27b808516657", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-603860889-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fdd2d4aeb954b6fae049090b32f657b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d48f0ef6-34e5-44d4-8baf-4470ed96ce73", "external-id": "nsx-vlan-transportzone-316", "segmentation_id": 316, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83bdd4e5-89", "ovs_interfaceid": "83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 942.175573] env[61852]: DEBUG oslo_vmware.api [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293194, 'name': PowerOnVM_Task, 'duration_secs': 0.946229} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 942.175866] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 942.176084] env[61852]: INFO nova.compute.manager [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Took 9.06 seconds to spawn the instance on the hypervisor.
[ 942.176260] env[61852]: DEBUG nova.compute.manager [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 942.177018] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d629d7d4-680f-4ad8-9660-fb16940ae46f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 942.249814] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "fb75509e-3cbf-406e-ad2d-aeb51a68295d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 942.249814] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "fb75509e-3cbf-406e-ad2d-aeb51a68295d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 942.254096] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5cf7b8d2-aa93-461f-93f5-9e063dde08b6 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.213s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 942.420202] env[61852]: DEBUG oslo_vmware.api [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]522e5786-1ab1-7385-9569-5493619ba37a, 'name': SearchDatastore_Task, 'duration_secs': 0.021097} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 942.421060] env[61852]: DEBUG oslo_concurrency.lockutils [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 942.421060] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 561d33d0-cad5-48ae-bd32-5de2220c5283/561d33d0-cad5-48ae-bd32-5de2220c5283.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 942.421328] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e588f552-aca1-48bb-ae13-ccde51c0ab86 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 942.427542] env[61852]: DEBUG oslo_vmware.api [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){
[ 942.427542] env[61852]: value = "task-1293196"
[ 942.427542] env[61852]: _type = "Task"
[ 942.427542] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 942.435190] env[61852]: DEBUG oslo_vmware.api [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293196, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 942.445405] env[61852]: DEBUG nova.network.neutron [req-e570f6b2-b6bf-475b-8573-0b757ab49854 req-2219f008-5cae-44b8-9c65-41f3a45bf2d4 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Updated VIF entry in instance network info cache for port 6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
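With the cache entry confirmed by the SearchDatastore calls, the root disk is cloned from the image cache into the instance directory via `VirtualDiskManager.CopyVirtualDisk_Task` (task-1293196 above). A sketch of that call path, assuming `session` is an established oslo.vmware session and `dc_ref` a datacenter reference; the copy spec is omitted, letting vCenter keep the source (sparse) format:

```python
# Sketch of the copy behind vm_util.copy_virtual_disk: cache vmdk in,
# instance vmdk out. dc_ref is an assumed datacenter reference.
def copy_virtual_disk(session, dc_ref, source_vmdk, dest_vmdk):
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, "CopyVirtualDisk_Task", disk_mgr,
                              sourceName=source_vmdk, sourceDatacenter=dc_ref,
                              destName=dest_vmdk, destDatacenter=dc_ref)
    session.wait_for_task(task)  # task-1293196 completes in ~0.55s below
```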
[ 942.446082] env[61852]: DEBUG nova.network.neutron [req-e570f6b2-b6bf-475b-8573-0b757ab49854 req-2219f008-5cae-44b8-9c65-41f3a45bf2d4 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Updating instance_info_cache with network_info: [{"id": "6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883", "address": "fa:16:3e:b2:2d:44", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ab757ae-eb", "ovs_interfaceid": "6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 942.527750] env[61852]: DEBUG nova.compute.manager [req-c0aa7b7b-33e3-4444-be4e-7fc34b0cf995 req-c8713d44-1704-42ac-aeaa-2dc8c819ffd3 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Received event network-changed-6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 942.528030] env[61852]: DEBUG nova.compute.manager [req-c0aa7b7b-33e3-4444-be4e-7fc34b0cf995 req-c8713d44-1704-42ac-aeaa-2dc8c819ffd3 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Refreshing instance network info cache due to event network-changed-6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 942.528275] env[61852]: DEBUG oslo_concurrency.lockutils [req-c0aa7b7b-33e3-4444-be4e-7fc34b0cf995 req-c8713d44-1704-42ac-aeaa-2dc8c819ffd3 service nova] Acquiring lock "refresh_cache-4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 942.550122] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Releasing lock "refresh_cache-8d8679db-eb9d-45c1-b053-70378f58e273" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 942.700187] env[61852]: INFO nova.compute.manager [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Took 15.84 seconds to build instance.
[ 942.752211] env[61852]: DEBUG nova.compute.manager [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 942.833858] env[61852]: INFO nova.scheduler.client.report [None req-5cf7b8d2-aa93-461f-93f5-9e063dde08b6 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Deleted allocation for migration d85168bf-e137-4b9c-a79c-77442dc6a529
[ 942.941244] env[61852]: DEBUG oslo_vmware.api [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293196, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 942.948469] env[61852]: DEBUG oslo_concurrency.lockutils [req-e570f6b2-b6bf-475b-8573-0b757ab49854 req-2219f008-5cae-44b8-9c65-41f3a45bf2d4 service nova] Releasing lock "refresh_cache-4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 942.949015] env[61852]: DEBUG nova.compute.manager [req-e570f6b2-b6bf-475b-8573-0b757ab49854 req-2219f008-5cae-44b8-9c65-41f3a45bf2d4 service nova] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Received event network-changed-fb4d01a4-4b0f-4591-aaf9-f8487c4cd460 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 942.949233] env[61852]: DEBUG nova.compute.manager [req-e570f6b2-b6bf-475b-8573-0b757ab49854 req-2219f008-5cae-44b8-9c65-41f3a45bf2d4 service nova] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Refreshing instance network info cache due to event network-changed-fb4d01a4-4b0f-4591-aaf9-f8487c4cd460. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 942.949478] env[61852]: DEBUG oslo_concurrency.lockutils [req-e570f6b2-b6bf-475b-8573-0b757ab49854 req-2219f008-5cae-44b8-9c65-41f3a45bf2d4 service nova] Acquiring lock "refresh_cache-d58958f2-7b6f-4480-9710-aa9e67ebd37c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 942.949795] env[61852]: DEBUG oslo_concurrency.lockutils [req-e570f6b2-b6bf-475b-8573-0b757ab49854 req-2219f008-5cae-44b8-9c65-41f3a45bf2d4 service nova] Acquired lock "refresh_cache-d58958f2-7b6f-4480-9710-aa9e67ebd37c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 942.949983] env[61852]: DEBUG nova.network.neutron [req-e570f6b2-b6bf-475b-8573-0b757ab49854 req-2219f008-5cae-44b8-9c65-41f3a45bf2d4 service nova] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Refreshing network info cache for port fb4d01a4-4b0f-4591-aaf9-f8487c4cd460 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 942.951950] env[61852]: DEBUG oslo_concurrency.lockutils [req-c0aa7b7b-33e3-4444-be4e-7fc34b0cf995 req-c8713d44-1704-42ac-aeaa-2dc8c819ffd3 service nova] Acquired lock "refresh_cache-4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 942.952379] env[61852]: DEBUG nova.network.neutron [req-c0aa7b7b-33e3-4444-be4e-7fc34b0cf995 req-c8713d44-1704-42ac-aeaa-2dc8c819ffd3 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Refreshing network info cache for port 6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 942.960536] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 942.961671] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fee349d-f207-46cf-b9f3-b40b262077d8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 942.970833] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 942.971136] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f897a128-d64f-447a-b39e-931265ab7b4d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 943.038901] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 943.039189] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 943.039383] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Deleting the datastore file [datastore1] 8d8679db-eb9d-45c1-b053-70378f58e273 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 943.039647] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c450e23-f6f5-4750-992c-d4ca5914c545 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 943.046280] env[61852]: DEBUG oslo_vmware.api [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){
[ 943.046280] env[61852]: value = "task-1293198"
[ 943.046280] env[61852]: _type = "Task"
[ 943.046280] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 943.054969] env[61852]: DEBUG oslo_vmware.api [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293198, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 943.203304] env[61852]: DEBUG oslo_concurrency.lockutils [None req-67fedbb4-c74c-48ef-9805-e29678c21780 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "ba863c60-444a-4959-8f8f-87b4952d2872" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.355s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 943.280119] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 943.280295] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 943.281860] env[61852]: INFO nova.compute.claims [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 943.339143] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5cf7b8d2-aa93-461f-93f5-9e063dde08b6 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "51ecc9c3-a3fc-4bd7-8c90-003451700212" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.669s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
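Teardown of instance 8d8679db above is two vSphere operations: `UnregisterVM` (synchronous, drops the VM from inventory without touching files) followed by `FileManager.DeleteDatastoreFile_Task` on the instance directory. A condensed sketch of the sequence, with `session`, `vm_ref`, `dc_ref` and `instance_dir` as assumed inputs:

```python
# Sketch of the unregister-then-delete teardown seen in the log.
# UnregisterVM is not a task; the datastore delete is, so it is awaited.
def destroy_instance_files(session, vm_ref, dc_ref, instance_dir):
    session.invoke_api(session.vim, "UnregisterVM", vm_ref)
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, "DeleteDatastoreFile_Task",
                              file_manager, name=instance_dir,
                              datacenter=dc_ref)
    session.wait_for_task(task)  # task-1293198, ~0.42s in the log
```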
"51ecc9c3-a3fc-4bd7-8c90-003451700212" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.669s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.437704] env[61852]: DEBUG oslo_vmware.api [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293196, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.547136} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.437880] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 561d33d0-cad5-48ae-bd32-5de2220c5283/561d33d0-cad5-48ae-bd32-5de2220c5283.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 943.438140] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 943.438406] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8d307128-9671-46a2-b128-3844c82065a4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.444873] env[61852]: DEBUG oslo_vmware.api [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 943.444873] env[61852]: value = "task-1293199" [ 943.444873] env[61852]: _type = "Task" [ 943.444873] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.455087] env[61852]: DEBUG oslo_vmware.api [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293199, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.557096] env[61852]: DEBUG oslo_vmware.api [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293198, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.416615} completed successfully. 
[ 943.557953] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 943.558410] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 943.558814] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 943.586577] env[61852]: INFO nova.scheduler.client.report [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Deleted allocations for instance 8d8679db-eb9d-45c1-b053-70378f58e273
[ 943.697251] env[61852]: DEBUG nova.network.neutron [req-c0aa7b7b-33e3-4444-be4e-7fc34b0cf995 req-c8713d44-1704-42ac-aeaa-2dc8c819ffd3 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Updated VIF entry in instance network info cache for port 6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 943.697486] env[61852]: DEBUG nova.network.neutron [req-c0aa7b7b-33e3-4444-be4e-7fc34b0cf995 req-c8713d44-1704-42ac-aeaa-2dc8c819ffd3 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Updating instance_info_cache with network_info: [{"id": "6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883", "address": "fa:16:3e:b2:2d:44", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ab757ae-eb", "ovs_interfaceid": "6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 943.758775] env[61852]: DEBUG nova.network.neutron [req-e570f6b2-b6bf-475b-8573-0b757ab49854 req-2219f008-5cae-44b8-9c65-41f3a45bf2d4 service nova] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Updated VIF entry in instance network info cache for port fb4d01a4-4b0f-4591-aaf9-f8487c4cd460. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 943.759199] env[61852]: DEBUG nova.network.neutron [req-e570f6b2-b6bf-475b-8573-0b757ab49854 req-2219f008-5cae-44b8-9c65-41f3a45bf2d4 service nova] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Updating instance_info_cache with network_info: [{"id": "fb4d01a4-4b0f-4591-aaf9-f8487c4cd460", "address": "fa:16:3e:fe:46:c7", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb4d01a4-4b", "ovs_interfaceid": "fb4d01a4-4b0f-4591-aaf9-f8487c4cd460", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 943.954904] env[61852]: DEBUG oslo_vmware.api [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293199, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.347689} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 943.955201] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 943.956168] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba9f200c-85e2-4ef1-bd79-36b23fd58f99 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 943.978767] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] 561d33d0-cad5-48ae-bd32-5de2220c5283/561d33d0-cad5-48ae-bd32-5de2220c5283.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 943.979402] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b93c6142-0a99-460a-8aa4-153f8aba002f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 944.000075] env[61852]: DEBUG oslo_vmware.api [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){
[ 944.000075] env[61852]: value = "task-1293200"
[ 944.000075] env[61852]: _type = "Task"
[ 944.000075] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 944.007896] env[61852]: DEBUG oslo_vmware.api [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293200, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 944.034850] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d1221c5-0832-4b68-83bd-5a132ad3d170 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "51ecc9c3-a3fc-4bd7-8c90-003451700212" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 944.035163] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d1221c5-0832-4b68-83bd-5a132ad3d170 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "51ecc9c3-a3fc-4bd7-8c90-003451700212" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 944.035377] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d1221c5-0832-4b68-83bd-5a132ad3d170 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "51ecc9c3-a3fc-4bd7-8c90-003451700212-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 944.035599] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d1221c5-0832-4b68-83bd-5a132ad3d170 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "51ecc9c3-a3fc-4bd7-8c90-003451700212-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 944.035814] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d1221c5-0832-4b68-83bd-5a132ad3d170 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "51ecc9c3-a3fc-4bd7-8c90-003451700212-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 944.039413] env[61852]: INFO nova.compute.manager [None req-5d1221c5-0832-4b68-83bd-5a132ad3d170 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Terminating instance
[ 944.041470] env[61852]: DEBUG nova.compute.manager [None req-5d1221c5-0832-4b68-83bd-5a132ad3d170 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Start destroying the instance on the hypervisor.
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 944.041709] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5d1221c5-0832-4b68-83bd-5a132ad3d170 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 944.042674] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e092b9-c99c-42ac-aa16-8b9e401bff5f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.051097] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d1221c5-0832-4b68-83bd-5a132ad3d170 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 944.051407] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d6a21a24-a5ca-48b8-842c-fcecd5911567 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.059086] env[61852]: DEBUG oslo_vmware.api [None req-5d1221c5-0832-4b68-83bd-5a132ad3d170 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 944.059086] env[61852]: value = "task-1293201" [ 944.059086] env[61852]: _type = "Task" [ 944.059086] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.066666] env[61852]: DEBUG oslo_vmware.api [None req-5d1221c5-0832-4b68-83bd-5a132ad3d170 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293201, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.093988] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.200140] env[61852]: DEBUG oslo_concurrency.lockutils [req-c0aa7b7b-33e3-4444-be4e-7fc34b0cf995 req-c8713d44-1704-42ac-aeaa-2dc8c819ffd3 service nova] Releasing lock "refresh_cache-4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.262233] env[61852]: DEBUG oslo_concurrency.lockutils [req-e570f6b2-b6bf-475b-8573-0b757ab49854 req-2219f008-5cae-44b8-9c65-41f3a45bf2d4 service nova] Releasing lock "refresh_cache-d58958f2-7b6f-4480-9710-aa9e67ebd37c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.391959] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08f0fe21-44c3-4975-ae3c-d583c965992b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.399608] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44d6c4f3-8cea-4d3d-8426-5305520762bb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.434461] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf6a98d8-c2fa-4e45-ab64-36bcc381c2ec {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.442640] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d1f6b1-4c37-4a62-a6e9-0f1699ea3a18 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.457275] env[61852]: DEBUG nova.compute.provider_tree [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 944.511219] env[61852]: DEBUG oslo_vmware.api [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293200, 'name': ReconfigVM_Task, 'duration_secs': 0.343234} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.511420] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Reconfigured VM instance instance-0000005a to attach disk [datastore1] 561d33d0-cad5-48ae-bd32-5de2220c5283/561d33d0-cad5-48ae-bd32-5de2220c5283.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 944.512011] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c87199c2-39fc-4c07-b01b-2ccf1f045e0b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.518383] env[61852]: DEBUG oslo_vmware.api [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 944.518383] env[61852]: value = "task-1293202" [ 944.518383] env[61852]: _type = "Task" [ 944.518383] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.525852] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "df332116-2ae3-4e51-99b0-108921470959" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.526090] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "df332116-2ae3-4e51-99b0-108921470959" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.530221] env[61852]: DEBUG oslo_vmware.api [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293202, 'name': Rename_Task} progress is 6%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.553961] env[61852]: DEBUG nova.compute.manager [req-ca875f9f-18f1-4269-81f8-4d7f67f0eaa1 req-84641576-670a-4697-aa18-8737e1de2d07 service nova] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Received event network-vif-unplugged-83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 944.554180] env[61852]: DEBUG oslo_concurrency.lockutils [req-ca875f9f-18f1-4269-81f8-4d7f67f0eaa1 req-84641576-670a-4697-aa18-8737e1de2d07 service nova] Acquiring lock "8d8679db-eb9d-45c1-b053-70378f58e273-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.554386] env[61852]: DEBUG oslo_concurrency.lockutils [req-ca875f9f-18f1-4269-81f8-4d7f67f0eaa1 req-84641576-670a-4697-aa18-8737e1de2d07 service nova] Lock "8d8679db-eb9d-45c1-b053-70378f58e273-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.554567] env[61852]: DEBUG oslo_concurrency.lockutils [req-ca875f9f-18f1-4269-81f8-4d7f67f0eaa1 req-84641576-670a-4697-aa18-8737e1de2d07 service nova] Lock "8d8679db-eb9d-45c1-b053-70378f58e273-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.554740] env[61852]: DEBUG nova.compute.manager [req-ca875f9f-18f1-4269-81f8-4d7f67f0eaa1 req-84641576-670a-4697-aa18-8737e1de2d07 service nova] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] No waiting events found dispatching network-vif-unplugged-83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 944.554913] env[61852]: WARNING nova.compute.manager [req-ca875f9f-18f1-4269-81f8-4d7f67f0eaa1 req-84641576-670a-4697-aa18-8737e1de2d07 service nova] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Received unexpected event network-vif-unplugged-83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b for instance with vm_state shelved_offloaded and task_state None. [ 944.555090] env[61852]: DEBUG nova.compute.manager [req-ca875f9f-18f1-4269-81f8-4d7f67f0eaa1 req-84641576-670a-4697-aa18-8737e1de2d07 service nova] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Received event network-changed-83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 944.555291] env[61852]: DEBUG nova.compute.manager [req-ca875f9f-18f1-4269-81f8-4d7f67f0eaa1 req-84641576-670a-4697-aa18-8737e1de2d07 service nova] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Refreshing instance network info cache due to event network-changed-83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 944.555521] env[61852]: DEBUG oslo_concurrency.lockutils [req-ca875f9f-18f1-4269-81f8-4d7f67f0eaa1 req-84641576-670a-4697-aa18-8737e1de2d07 service nova] Acquiring lock "refresh_cache-8d8679db-eb9d-45c1-b053-70378f58e273" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 944.555665] env[61852]: DEBUG oslo_concurrency.lockutils [req-ca875f9f-18f1-4269-81f8-4d7f67f0eaa1 req-84641576-670a-4697-aa18-8737e1de2d07 service nova] Acquired lock "refresh_cache-8d8679db-eb9d-45c1-b053-70378f58e273" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.555865] env[61852]: DEBUG nova.network.neutron [req-ca875f9f-18f1-4269-81f8-4d7f67f0eaa1 req-84641576-670a-4697-aa18-8737e1de2d07 service nova] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Refreshing network info cache for port 83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 944.569417] env[61852]: DEBUG oslo_vmware.api [None req-5d1221c5-0832-4b68-83bd-5a132ad3d170 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293201, 'name': PowerOffVM_Task, 'duration_secs': 0.181021} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.569802] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d1221c5-0832-4b68-83bd-5a132ad3d170 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 944.569854] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5d1221c5-0832-4b68-83bd-5a132ad3d170 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 944.570120] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d24da08b-b263-4afe-8dd9-634c2e26fcb3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.624497] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5d1221c5-0832-4b68-83bd-5a132ad3d170 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 944.624866] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5d1221c5-0832-4b68-83bd-5a132ad3d170 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 944.624934] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d1221c5-0832-4b68-83bd-5a132ad3d170 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Deleting the datastore file [datastore1] 51ecc9c3-a3fc-4bd7-8c90-003451700212 {{(pid=61852) 
file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 944.625218] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d8a9889d-d572-4238-904e-ba01e4509bf7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.633340] env[61852]: DEBUG oslo_vmware.api [None req-5d1221c5-0832-4b68-83bd-5a132ad3d170 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 944.633340] env[61852]: value = "task-1293204" [ 944.633340] env[61852]: _type = "Task" [ 944.633340] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.643149] env[61852]: DEBUG oslo_vmware.api [None req-5d1221c5-0832-4b68-83bd-5a132ad3d170 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293204, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.906914] env[61852]: DEBUG nova.compute.manager [None req-3fc77ece-bf20-4a9f-9427-e632551ead1c tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 944.907963] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a122adf-8764-4ca0-b2be-e08a6adec13d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.960733] env[61852]: DEBUG nova.scheduler.client.report [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 945.028036] env[61852]: DEBUG oslo_vmware.api [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293202, 'name': Rename_Task, 'duration_secs': 0.138035} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.028309] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 945.028561] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9ac070e8-8d5b-4838-a990-454333862e00 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.032156] env[61852]: DEBUG nova.compute.manager [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 945.036393] env[61852]: DEBUG oslo_vmware.api [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 945.036393] env[61852]: value = "task-1293205" [ 945.036393] env[61852]: _type = "Task" [ 945.036393] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.050546] env[61852]: DEBUG oslo_vmware.api [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293205, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.143200] env[61852]: DEBUG oslo_vmware.api [None req-5d1221c5-0832-4b68-83bd-5a132ad3d170 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293204, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.179613} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.145683] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d1221c5-0832-4b68-83bd-5a132ad3d170 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 945.145915] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5d1221c5-0832-4b68-83bd-5a132ad3d170 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 945.146101] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5d1221c5-0832-4b68-83bd-5a132ad3d170 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 945.146302] env[61852]: INFO nova.compute.manager [None req-5d1221c5-0832-4b68-83bd-5a132ad3d170 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Took 1.10 seconds to destroy the instance on the hypervisor. [ 945.146643] env[61852]: DEBUG oslo.service.loopingcall [None req-5d1221c5-0832-4b68-83bd-5a132ad3d170 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 945.146901] env[61852]: DEBUG nova.compute.manager [-] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 945.147048] env[61852]: DEBUG nova.network.neutron [-] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 945.373836] env[61852]: DEBUG nova.network.neutron [req-ca875f9f-18f1-4269-81f8-4d7f67f0eaa1 req-84641576-670a-4697-aa18-8737e1de2d07 service nova] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Updated VIF entry in instance network info cache for port 83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b. 
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 945.374292] env[61852]: DEBUG nova.network.neutron [req-ca875f9f-18f1-4269-81f8-4d7f67f0eaa1 req-84641576-670a-4697-aa18-8737e1de2d07 service nova] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Updating instance_info_cache with network_info: [{"id": "83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b", "address": "fa:16:3e:82:81:71", "network": {"id": "d8dfb48f-1d4c-40ca-a2c0-27b808516657", "bridge": null, "label": "tempest-ServersNegativeTestJSON-603860889-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fdd2d4aeb954b6fae049090b32f657b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap83bdd4e5-89", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.496758] env[61852]: INFO nova.compute.manager [None req-3fc77ece-bf20-4a9f-9427-e632551ead1c tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] instance snapshotting [ 945.496758] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-521b444a-2e08-4040-8584-9fa9f2ef5825 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.496758] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8237bdb1-2828-40a8-8ada-4211646984cc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.496758] env[61852]: DEBUG nova.compute.manager [req-a4572e26-0e8e-44e6-8187-33df61a6c9b7 req-0219a65b-b51a-4d74-a7da-0cc271a8b193 service nova] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Received event network-vif-deleted-61e94b93-d030-4c70-8ffc-ce81cbf29d01 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 945.496758] env[61852]: INFO nova.compute.manager [req-a4572e26-0e8e-44e6-8187-33df61a6c9b7 req-0219a65b-b51a-4d74-a7da-0cc271a8b193 service nova] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Neutron deleted interface 61e94b93-d030-4c70-8ffc-ce81cbf29d01; detaching it from the instance and deleting it from the info cache [ 945.496758] env[61852]: DEBUG nova.network.neutron [req-a4572e26-0e8e-44e6-8187-33df61a6c9b7 req-0219a65b-b51a-4d74-a7da-0cc271a8b193 service nova] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.496758] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.185s {{(pid=61852) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.496758] env[61852]: DEBUG nova.compute.manager [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 945.496758] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.375s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.496758] env[61852]: DEBUG nova.objects.instance [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lazy-loading 'resources' on Instance uuid 8d8679db-eb9d-45c1-b053-70378f58e273 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 945.551442] env[61852]: DEBUG oslo_vmware.api [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293205, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.558557] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.877264] env[61852]: DEBUG oslo_concurrency.lockutils [req-ca875f9f-18f1-4269-81f8-4d7f67f0eaa1 req-84641576-670a-4697-aa18-8737e1de2d07 service nova] Releasing lock "refresh_cache-8d8679db-eb9d-45c1-b053-70378f58e273" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 945.913105] env[61852]: DEBUG nova.network.neutron [-] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.950706] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-17565af8-b24c-4d1f-94e6-4c318918e80f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.957505] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-3fc77ece-bf20-4a9f-9427-e632551ead1c tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Creating Snapshot of the VM instance {{(pid=61852) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 945.957777] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-72c6c4ba-e23b-41f6-9f3f-6aea2a33283d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.963638] env[61852]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93f6f98d-7a1c-4e32-a5f5-b3a12ff9f436 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.976213] env[61852]: DEBUG nova.compute.utils [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 945.977638] env[61852]: DEBUG nova.objects.instance [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lazy-loading 'numa_topology' on Instance uuid 8d8679db-eb9d-45c1-b053-70378f58e273 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 945.978564] env[61852]: DEBUG oslo_vmware.api [None req-3fc77ece-bf20-4a9f-9427-e632551ead1c tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 945.978564] env[61852]: value = "task-1293206" [ 945.978564] env[61852]: _type = "Task" [ 945.978564] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.979015] env[61852]: DEBUG nova.compute.manager [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 945.979170] env[61852]: DEBUG nova.network.neutron [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 945.990632] env[61852]: DEBUG oslo_vmware.api [None req-3fc77ece-bf20-4a9f-9427-e632551ead1c tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293206, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.998173] env[61852]: DEBUG nova.compute.manager [req-a4572e26-0e8e-44e6-8187-33df61a6c9b7 req-0219a65b-b51a-4d74-a7da-0cc271a8b193 service nova] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Detach interface failed, port_id=61e94b93-d030-4c70-8ffc-ce81cbf29d01, reason: Instance 51ecc9c3-a3fc-4bd7-8c90-003451700212 could not be found. 
{{(pid=61852) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 946.031414] env[61852]: DEBUG nova.policy [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'be922c40dddf48c8ae436d0a244e7b6b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bdac3605118e44a69d44ab56cafe2e21', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 946.048857] env[61852]: DEBUG oslo_vmware.api [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293205, 'name': PowerOnVM_Task, 'duration_secs': 0.724875} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.049146] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 946.049355] env[61852]: INFO nova.compute.manager [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Took 7.95 seconds to spawn the instance on the hypervisor. [ 946.049538] env[61852]: DEBUG nova.compute.manager [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 946.050327] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc0a62aa-3d18-462f-856d-3ade67b73fb7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.106209] env[61852]: DEBUG oslo_concurrency.lockutils [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquiring lock "8d8679db-eb9d-45c1-b053-70378f58e273" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.304443] env[61852]: DEBUG nova.network.neutron [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Successfully created port: 41d20024-17d1-4e43-ad02-a6316dcc9c2f {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 946.416605] env[61852]: INFO nova.compute.manager [-] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Took 1.27 seconds to deallocate network for instance. 
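The PowerOffVM_Task / ReconfigVM_Task / CreateSnapshot_Task exchanges recorded above all follow oslo.vmware's standard task-polling pattern: invoke_api() issues the vSphere SOAP call and returns immediately with a Task managed-object reference, and wait_for_task() then polls it (the "_poll_task ... progress is N%" entries) until it reaches a terminal state. Below is a minimal sketch of driving that pattern directly, under stated assumptions: the vCenter host and credentials are placeholders, and vm_ref is a VirtualMachine managed-object reference assumed to have been obtained elsewhere (e.g. via a PropertyCollector query, elided here); none of these values are taken from this log.

```python
# Minimal sketch of the oslo.vmware task-polling pattern seen in the log above.
# Assumptions: oslo.vmware is installed; host/credentials are placeholders;
# vm_ref is a VirtualMachine managed-object reference obtained elsewhere.
from oslo_vmware import api


def power_off(session, vm_ref):
    # "Invoking VirtualMachine.PowerOffVM_Task": the call returns immediately
    # with a Task managed-object reference rather than blocking.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # wait_for_task() loops on the task state (the "_poll_task ... progress
    # is N%" entries) and returns the task info on success, raising if the
    # task ends in an error state.
    return session.wait_for_task(task)


if __name__ == '__main__':
    session = api.VMwareAPISession(
        'vcenter.example.org',   # placeholder vCenter host
        'user', 'secret',        # placeholder credentials
        10,                      # api_retry_count: session-level retries
        0.5)                     # task_poll_interval: the polling cadence
```

Every other *_Task method in this section (ExtendVirtualDisk_Task, Rename_Task, PowerOnVM_Task, UnregisterVM, FileManager.DeleteDatastoreFile_Task) is driven through this same invoke-then-poll loop, which is why the `wait_for_task` / `_poll_task` pairs recur throughout the log.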
[ 946.479880] env[61852]: DEBUG nova.compute.manager [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 946.482590] env[61852]: DEBUG nova.objects.base [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Object Instance<8d8679db-eb9d-45c1-b053-70378f58e273> lazy-loaded attributes: resources,numa_topology {{(pid=61852) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 946.495825] env[61852]: DEBUG oslo_vmware.api [None req-3fc77ece-bf20-4a9f-9427-e632551ead1c tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293206, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.569058] env[61852]: INFO nova.compute.manager [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Took 16.80 seconds to build instance. [ 946.631683] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c207b7d1-2c87-4cf2-ad51-5ecf74eb37a0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.639231] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c3c788-1ca1-4f1f-a505-e53ef2d8e70e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.670623] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-208f6acb-bc8e-4af5-930d-1542f04b092e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.677865] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-727cae49-a428-4a57-a3f9-08c08c399dcd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.690843] env[61852]: DEBUG nova.compute.provider_tree [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 946.928111] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d1221c5-0832-4b68-83bd-5a132ad3d170 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.993519] env[61852]: DEBUG oslo_vmware.api [None req-3fc77ece-bf20-4a9f-9427-e632551ead1c tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293206, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.070781] env[61852]: DEBUG oslo_concurrency.lockutils [None req-dffac153-9152-4a52-ba9d-b7f144162d48 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "561d33d0-cad5-48ae-bd32-5de2220c5283" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.306s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.209583] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "561d33d0-cad5-48ae-bd32-5de2220c5283" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.209925] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "561d33d0-cad5-48ae-bd32-5de2220c5283" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.210167] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "561d33d0-cad5-48ae-bd32-5de2220c5283-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.210359] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "561d33d0-cad5-48ae-bd32-5de2220c5283-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.210537] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "561d33d0-cad5-48ae-bd32-5de2220c5283-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.213236] env[61852]: ERROR nova.scheduler.client.report [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [req-32dc342c-edc3-4765-b2d7-32a53445b843] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f818062c-7b17-4bd0-94af-192a674543c3. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-32dc342c-edc3-4765-b2d7-32a53445b843"}]} [ 947.215672] env[61852]: INFO nova.compute.manager [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Terminating instance [ 947.217833] env[61852]: DEBUG nova.compute.manager [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 947.218079] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 947.218890] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-801ec386-97a4-4d35-b941-9b6c65a2d596 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.227468] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 947.227716] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f5ae12c7-7fbe-42f1-a5d5-2f9f42f799cf {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.230403] env[61852]: DEBUG nova.scheduler.client.report [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Refreshing inventories for resource provider f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 947.233787] env[61852]: DEBUG oslo_vmware.api [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 947.233787] env[61852]: value = "task-1293207" [ 947.233787] env[61852]: _type = "Task" [ 947.233787] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.241526] env[61852]: DEBUG oslo_vmware.api [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293207, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.244567] env[61852]: DEBUG nova.scheduler.client.report [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Updating ProviderTree inventory for provider f818062c-7b17-4bd0-94af-192a674543c3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 947.244966] env[61852]: DEBUG nova.compute.provider_tree [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 947.256473] env[61852]: DEBUG nova.scheduler.client.report [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Refreshing aggregate associations for resource provider f818062c-7b17-4bd0-94af-192a674543c3, aggregates: None {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 947.275724] env[61852]: DEBUG nova.scheduler.client.report [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Refreshing trait associations for resource provider f818062c-7b17-4bd0-94af-192a674543c3, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 947.408102] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03b859a9-74c0-4907-8ff4-ef4053c5bd6c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.416590] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d95379c4-2bdb-4a14-a3ad-b22e8a40f3a2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.446881] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1145e052-11a6-4e13-a44d-63a923b4c9d2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.454417] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcc92ee9-6e66-407a-9e87-5d66b8832ec3 {{(pid=61852) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.468293] env[61852]: DEBUG nova.compute.provider_tree [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 947.494505] env[61852]: DEBUG oslo_vmware.api [None req-3fc77ece-bf20-4a9f-9427-e632551ead1c tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293206, 'name': CreateSnapshot_Task, 'duration_secs': 1.273284} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.495973] env[61852]: DEBUG nova.compute.manager [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 947.498686] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-3fc77ece-bf20-4a9f-9427-e632551ead1c tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Created Snapshot of the VM instance {{(pid=61852) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 947.499769] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-654a1b83-84e1-4b43-aa15-1f3278d6618c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.522309] env[61852]: DEBUG nova.virt.hardware [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 947.522660] env[61852]: DEBUG nova.virt.hardware [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:348}} [ 947.522929] env[61852]: DEBUG nova.virt.hardware [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 947.523257] env[61852]: DEBUG nova.virt.hardware [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 947.523512] env[61852]: DEBUG nova.virt.hardware [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 947.523771] env[61852]: DEBUG nova.virt.hardware [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 947.524121] env[61852]: DEBUG nova.virt.hardware [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 947.524398] env[61852]: DEBUG nova.virt.hardware [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 947.524688] env[61852]: DEBUG nova.virt.hardware [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 947.524972] env[61852]: DEBUG nova.virt.hardware [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 947.525293] env[61852]: DEBUG nova.virt.hardware [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 947.526446] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-051cb814-0e5c-4d8a-a267-d7f3ffb88043 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.536558] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9227aece-964e-407e-a336-a4d81e806526 {{(pid=61852) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.696891] env[61852]: DEBUG nova.compute.manager [req-2ef7a2bf-7e1c-42df-adea-81fff410c63e req-62701f65-568a-4402-a9e4-be0e736e0ffb service nova] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Received event network-vif-plugged-41d20024-17d1-4e43-ad02-a6316dcc9c2f {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 947.697136] env[61852]: DEBUG oslo_concurrency.lockutils [req-2ef7a2bf-7e1c-42df-adea-81fff410c63e req-62701f65-568a-4402-a9e4-be0e736e0ffb service nova] Acquiring lock "fb75509e-3cbf-406e-ad2d-aeb51a68295d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.697356] env[61852]: DEBUG oslo_concurrency.lockutils [req-2ef7a2bf-7e1c-42df-adea-81fff410c63e req-62701f65-568a-4402-a9e4-be0e736e0ffb service nova] Lock "fb75509e-3cbf-406e-ad2d-aeb51a68295d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.697574] env[61852]: DEBUG oslo_concurrency.lockutils [req-2ef7a2bf-7e1c-42df-adea-81fff410c63e req-62701f65-568a-4402-a9e4-be0e736e0ffb service nova] Lock "fb75509e-3cbf-406e-ad2d-aeb51a68295d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.697766] env[61852]: DEBUG nova.compute.manager [req-2ef7a2bf-7e1c-42df-adea-81fff410c63e req-62701f65-568a-4402-a9e4-be0e736e0ffb service nova] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] No waiting events found dispatching network-vif-plugged-41d20024-17d1-4e43-ad02-a6316dcc9c2f {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 947.697960] env[61852]: WARNING nova.compute.manager [req-2ef7a2bf-7e1c-42df-adea-81fff410c63e req-62701f65-568a-4402-a9e4-be0e736e0ffb service nova] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Received unexpected event network-vif-plugged-41d20024-17d1-4e43-ad02-a6316dcc9c2f for instance with vm_state building and task_state spawning. [ 947.743550] env[61852]: DEBUG oslo_vmware.api [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293207, 'name': PowerOffVM_Task, 'duration_secs': 0.212152} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.743848] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 947.744049] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 947.744307] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-226273f5-e2d9-46e5-9181-c88a3b0a38ab {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.803931] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 947.804190] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 947.804409] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Deleting the datastore file [datastore1] 561d33d0-cad5-48ae-bd32-5de2220c5283 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 947.804794] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2a172b46-e977-4a51-999e-36f424664895 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.813046] env[61852]: DEBUG oslo_vmware.api [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 947.813046] env[61852]: value = "task-1293209" [ 947.813046] env[61852]: _type = "Task" [ 947.813046] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.820563] env[61852]: DEBUG oslo_vmware.api [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293209, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.997850] env[61852]: DEBUG nova.scheduler.client.report [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Updated inventory for provider f818062c-7b17-4bd0-94af-192a674543c3 with generation 111 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 947.998193] env[61852]: DEBUG nova.compute.provider_tree [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Updating resource provider f818062c-7b17-4bd0-94af-192a674543c3 generation from 111 to 112 during operation: update_inventory {{(pid=61852) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 947.998337] env[61852]: DEBUG nova.compute.provider_tree [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 948.022323] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-3fc77ece-bf20-4a9f-9427-e632551ead1c tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Creating linked-clone VM from snapshot {{(pid=61852) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 948.022836] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c17f4e33-fa29-4d75-8866-63ba04eae62f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.030937] env[61852]: DEBUG oslo_vmware.api [None req-3fc77ece-bf20-4a9f-9427-e632551ead1c tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 948.030937] env[61852]: value = "task-1293210" [ 948.030937] env[61852]: _type = "Task" [ 948.030937] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.040863] env[61852]: DEBUG oslo_vmware.api [None req-3fc77ece-bf20-4a9f-9427-e632551ead1c tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293210, 'name': CloneVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.259184] env[61852]: DEBUG nova.network.neutron [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Successfully updated port: 41d20024-17d1-4e43-ad02-a6316dcc9c2f {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 948.282478] env[61852]: DEBUG nova.compute.manager [req-f8e9708f-ae9b-4366-b3b5-8a4d8b2eb3f0 req-a442fb96-bb56-4bdf-b614-8a8c1657bf2d service nova] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Received event network-changed-41d20024-17d1-4e43-ad02-a6316dcc9c2f {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 948.282690] env[61852]: DEBUG nova.compute.manager [req-f8e9708f-ae9b-4366-b3b5-8a4d8b2eb3f0 req-a442fb96-bb56-4bdf-b614-8a8c1657bf2d service nova] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Refreshing instance network info cache due to event network-changed-41d20024-17d1-4e43-ad02-a6316dcc9c2f. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 948.282931] env[61852]: DEBUG oslo_concurrency.lockutils [req-f8e9708f-ae9b-4366-b3b5-8a4d8b2eb3f0 req-a442fb96-bb56-4bdf-b614-8a8c1657bf2d service nova] Acquiring lock "refresh_cache-fb75509e-3cbf-406e-ad2d-aeb51a68295d" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.283105] env[61852]: DEBUG oslo_concurrency.lockutils [req-f8e9708f-ae9b-4366-b3b5-8a4d8b2eb3f0 req-a442fb96-bb56-4bdf-b614-8a8c1657bf2d service nova] Acquired lock "refresh_cache-fb75509e-3cbf-406e-ad2d-aeb51a68295d" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.283286] env[61852]: DEBUG nova.network.neutron [req-f8e9708f-ae9b-4366-b3b5-8a4d8b2eb3f0 req-a442fb96-bb56-4bdf-b614-8a8c1657bf2d service nova] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Refreshing network info cache for port 41d20024-17d1-4e43-ad02-a6316dcc9c2f {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 948.324919] env[61852]: DEBUG oslo_vmware.api [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293209, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139639} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.325236] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 948.325432] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 948.325727] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 948.325954] env[61852]: INFO nova.compute.manager [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Took 1.11 seconds to destroy the instance on the hypervisor. [ 948.326228] env[61852]: DEBUG oslo.service.loopingcall [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 948.326429] env[61852]: DEBUG nova.compute.manager [-] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 948.326540] env[61852]: DEBUG nova.network.neutron [-] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 948.503354] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.034s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.506082] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.948s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.507497] env[61852]: INFO nova.compute.claims [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 948.542870] env[61852]: DEBUG oslo_vmware.api [None req-3fc77ece-bf20-4a9f-9427-e632551ead1c 
tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293210, 'name': CloneVM_Task} progress is 94%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.762913] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "refresh_cache-fb75509e-3cbf-406e-ad2d-aeb51a68295d" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.817509] env[61852]: DEBUG nova.network.neutron [req-f8e9708f-ae9b-4366-b3b5-8a4d8b2eb3f0 req-a442fb96-bb56-4bdf-b614-8a8c1657bf2d service nova] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 948.908328] env[61852]: DEBUG nova.network.neutron [req-f8e9708f-ae9b-4366-b3b5-8a4d8b2eb3f0 req-a442fb96-bb56-4bdf-b614-8a8c1657bf2d service nova] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.016885] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6782807a-103b-4da8-9747-55bcd17310c9 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lock "8d8679db-eb9d-45c1-b053-70378f58e273" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 22.906s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.017813] env[61852]: DEBUG oslo_concurrency.lockutils [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lock "8d8679db-eb9d-45c1-b053-70378f58e273" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 2.912s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.018025] env[61852]: INFO nova.compute.manager [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Unshelving [ 949.043188] env[61852]: DEBUG oslo_vmware.api [None req-3fc77ece-bf20-4a9f-9427-e632551ead1c tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293210, 'name': CloneVM_Task} progress is 100%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.085243] env[61852]: DEBUG nova.network.neutron [-] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.411205] env[61852]: DEBUG oslo_concurrency.lockutils [req-f8e9708f-ae9b-4366-b3b5-8a4d8b2eb3f0 req-a442fb96-bb56-4bdf-b614-8a8c1657bf2d service nova] Releasing lock "refresh_cache-fb75509e-3cbf-406e-ad2d-aeb51a68295d" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.411585] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquired lock "refresh_cache-fb75509e-3cbf-406e-ad2d-aeb51a68295d" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.411746] env[61852]: DEBUG nova.network.neutron [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 949.542701] env[61852]: DEBUG oslo_vmware.api [None req-3fc77ece-bf20-4a9f-9427-e632551ead1c tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293210, 'name': CloneVM_Task, 'duration_secs': 1.032284} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.545488] env[61852]: INFO nova.virt.vmwareapi.vmops [None req-3fc77ece-bf20-4a9f-9427-e632551ead1c tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Created linked-clone VM from snapshot [ 949.546840] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e3b57a6-21c1-4aa6-a96e-bdc610ab92c5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.554659] env[61852]: DEBUG nova.virt.vmwareapi.images [None req-3fc77ece-bf20-4a9f-9427-e632551ead1c tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Uploading image 5c723b00-4748-4927-a9ab-d68350dc536e {{(pid=61852) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 949.567989] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fc77ece-bf20-4a9f-9427-e632551ead1c tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Destroying the VM {{(pid=61852) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 949.568303] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ab51d243-c71c-4ce8-a8d5-8b080fc74d0e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.577521] env[61852]: DEBUG oslo_vmware.api [None req-3fc77ece-bf20-4a9f-9427-e632551ead1c tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 949.577521] 
env[61852]: value = "task-1293211" [ 949.577521] env[61852]: _type = "Task" [ 949.577521] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.590674] env[61852]: INFO nova.compute.manager [-] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Took 1.26 seconds to deallocate network for instance. [ 949.591057] env[61852]: DEBUG oslo_vmware.api [None req-3fc77ece-bf20-4a9f-9427-e632551ead1c tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293211, 'name': Destroy_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.686158] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-946ce76d-1787-4a37-aa31-134df48a1795 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.693958] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4df7c409-a4b5-45e0-9de9-7bc1a5390013 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.724786] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27350792-4406-4a7b-a129-a004e14732b7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.732182] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e15a7eba-8b5f-497f-b031-e3d9e82047d2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.745472] env[61852]: DEBUG nova.compute.provider_tree [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 949.952328] env[61852]: DEBUG nova.network.neutron [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 950.046450] env[61852]: DEBUG oslo_concurrency.lockutils [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 950.087378] env[61852]: DEBUG oslo_vmware.api [None req-3fc77ece-bf20-4a9f-9427-e632551ead1c tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293211, 'name': Destroy_Task, 'duration_secs': 0.400679} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.087667] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-3fc77ece-bf20-4a9f-9427-e632551ead1c tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Destroyed the VM [ 950.087915] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-3fc77ece-bf20-4a9f-9427-e632551ead1c tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Deleting Snapshot of the VM instance {{(pid=61852) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 950.088194] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-f55fa4e0-9f8f-4d9f-a6b4-970584e8ffe5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.094507] env[61852]: DEBUG oslo_vmware.api [None req-3fc77ece-bf20-4a9f-9427-e632551ead1c tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 950.094507] env[61852]: value = "task-1293212" [ 950.094507] env[61852]: _type = "Task" [ 950.094507] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.103013] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 950.103370] env[61852]: DEBUG oslo_vmware.api [None req-3fc77ece-bf20-4a9f-9427-e632551ead1c tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293212, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.104353] env[61852]: DEBUG nova.network.neutron [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Updating instance_info_cache with network_info: [{"id": "41d20024-17d1-4e43-ad02-a6316dcc9c2f", "address": "fa:16:3e:96:00:1c", "network": {"id": "240e5d63-b796-4cef-9d1f-5d8f8868dea4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1472329620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdac3605118e44a69d44ab56cafe2e21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41d20024-17", "ovs_interfaceid": "41d20024-17d1-4e43-ad02-a6316dcc9c2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.249065] env[61852]: DEBUG nova.scheduler.client.report [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 950.307636] env[61852]: DEBUG nova.compute.manager [req-ea90cedb-db9c-44ae-9efe-af68468144fd req-7c49225f-c043-4d6f-8cfb-673a095d16b3 service nova] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Received event network-vif-deleted-983b26a7-ea32-4616-8527-24b24ecdb040 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 950.604765] env[61852]: DEBUG oslo_vmware.api [None req-3fc77ece-bf20-4a9f-9427-e632551ead1c tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293212, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.607107] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Releasing lock "refresh_cache-fb75509e-3cbf-406e-ad2d-aeb51a68295d" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.607107] env[61852]: DEBUG nova.compute.manager [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Instance network_info: |[{"id": "41d20024-17d1-4e43-ad02-a6316dcc9c2f", "address": "fa:16:3e:96:00:1c", "network": {"id": "240e5d63-b796-4cef-9d1f-5d8f8868dea4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1472329620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdac3605118e44a69d44ab56cafe2e21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41d20024-17", "ovs_interfaceid": "41d20024-17d1-4e43-ad02-a6316dcc9c2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 950.607341] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:96:00:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cd5d325-3053-407e-a4ee-f627e82a23f9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '41d20024-17d1-4e43-ad02-a6316dcc9c2f', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 950.614636] env[61852]: DEBUG oslo.service.loopingcall [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 950.614848] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 950.615085] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-46c3796d-9d78-48c2-8de3-d3d73d06457c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.634032] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 950.634032] env[61852]: value = "task-1293213" [ 950.634032] env[61852]: _type = "Task" [ 950.634032] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.640935] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293213, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.755198] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.248s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.755198] env[61852]: DEBUG nova.compute.manager [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 950.757427] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d1221c5-0832-4b68-83bd-5a132ad3d170 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.829s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 950.757630] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d1221c5-0832-4b68-83bd-5a132ad3d170 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.759727] env[61852]: DEBUG oslo_concurrency.lockutils [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.714s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 950.759946] env[61852]: DEBUG nova.objects.instance [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lazy-loading 'pci_requests' on Instance uuid 8d8679db-eb9d-45c1-b053-70378f58e273 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 950.782384] env[61852]: INFO nova.scheduler.client.report [None req-5d1221c5-0832-4b68-83bd-5a132ad3d170 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Deleted allocations for instance 51ecc9c3-a3fc-4bd7-8c90-003451700212 [ 951.105805] env[61852]: DEBUG oslo_vmware.api [None req-3fc77ece-bf20-4a9f-9427-e632551ead1c tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293212, 'name': RemoveSnapshot_Task, 'duration_secs': 0.872674} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.106176] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-3fc77ece-bf20-4a9f-9427-e632551ead1c tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Deleted Snapshot of the VM instance {{(pid=61852) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 951.143718] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293213, 'name': CreateVM_Task, 'duration_secs': 0.356299} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.143933] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 951.144568] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 951.144740] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.145086] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 951.145348] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5a911ec-36dd-40d0-9e00-dbd32bf416b4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.149572] env[61852]: DEBUG oslo_vmware.api [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 951.149572] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52449f76-3cc2-fc6a-3032-f656fdb3d14c" [ 951.149572] env[61852]: _type = "Task" [ 951.149572] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.157015] env[61852]: DEBUG oslo_vmware.api [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52449f76-3cc2-fc6a-3032-f656fdb3d14c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.262716] env[61852]: DEBUG nova.compute.utils [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 951.265346] env[61852]: DEBUG nova.objects.instance [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lazy-loading 'numa_topology' on Instance uuid 8d8679db-eb9d-45c1-b053-70378f58e273 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 951.266276] env[61852]: DEBUG nova.compute.manager [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 951.266437] env[61852]: DEBUG nova.network.neutron [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 951.289093] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5d1221c5-0832-4b68-83bd-5a132ad3d170 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "51ecc9c3-a3fc-4bd7-8c90-003451700212" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.254s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.304191] env[61852]: DEBUG nova.policy [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eeca45e07f5b41e38b9ab8ac31bad06c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14a017ea2b084ae0ad2994dda7809c7c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 951.535069] env[61852]: DEBUG nova.network.neutron [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Successfully created port: 16ad03e7-b72d-4cdd-8da7-5314a7cad855 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 951.611116] env[61852]: WARNING nova.compute.manager [None req-3fc77ece-bf20-4a9f-9427-e632551ead1c tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Image not found during snapshot: nova.exception.ImageNotFound: Image 5c723b00-4748-4927-a9ab-d68350dc536e could not be found. 
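The records above trace a recurring oslo pattern: take a lockutils lock named for the cached image, invoke a vCenter task through the API session, then poll the task to completion. Below is a minimal sketch of that pattern, assuming an already-created oslo_vmware VMwareAPISession bound to `session`; the function name `search_image_cache` and the `browser_ref`/`spec` parameters are illustrative placeholders, not Nova's actual helpers.

    from oslo_concurrency import lockutils

    IMAGE_CACHE_LOCK = ("[datastore1] devstack-image-cache_base/"
                        "90fd8f39-16b3-43e0-a682-0ec131005e31")

    def search_image_cache(session, browser_ref, spec):
        # Serialize access to this image's cache entry, mirroring the
        # 'Acquiring lock "[datastore1] devstack-image-cache_base/..."'
        # records above.
        with lockutils.lock(IMAGE_CACHE_LOCK):
            # invoke_api issues HostDatastoreBrowser.SearchDatastore_Task
            # and returns a task moref; wait_for_task then polls it until
            # done (the repeated "progress is 0%" / "completed
            # successfully" records in the log).
            task = session.invoke_api(
                session.vim, "SearchDatastore_Task", browser_ref,
                datastorePath="[datastore1] devstack-image-cache_base",
                searchSpec=spec)
            return session.wait_for_task(task)

The same acquire / invoke / poll / release shape underlies the CreateSnapshot_Task, CloneVM_Task, and CopyVirtualDisk_Task sequences elsewhere in this section.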
[ 951.660192] env[61852]: DEBUG oslo_vmware.api [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52449f76-3cc2-fc6a-3032-f656fdb3d14c, 'name': SearchDatastore_Task, 'duration_secs': 0.008852} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.660478] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.660719] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 951.660957] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 951.661271] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.661321] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 951.661553] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3f0f902b-e353-42b6-8669-b764ba1011ce {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.679142] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 951.679372] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 951.680166] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9cfd4241-c9cf-46ac-83d7-ec6928516fb3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.685703] env[61852]: DEBUG oslo_vmware.api [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 951.685703] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5273bc27-1e14-bbcc-0c5f-363dc27eb7fd" [ 951.685703] env[61852]: _type = "Task" [ 951.685703] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.694748] env[61852]: DEBUG oslo_vmware.api [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5273bc27-1e14-bbcc-0c5f-363dc27eb7fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.769340] env[61852]: DEBUG nova.compute.manager [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 951.772417] env[61852]: INFO nova.compute.claims [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 952.196312] env[61852]: DEBUG oslo_vmware.api [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5273bc27-1e14-bbcc-0c5f-363dc27eb7fd, 'name': SearchDatastore_Task, 'duration_secs': 0.077188} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.197141] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d3bb2c5-c6d9-44b4-add1-aa59860dba12 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.202420] env[61852]: DEBUG oslo_vmware.api [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 952.202420] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52eda882-8d55-6d88-e941-3332b9e019f4" [ 952.202420] env[61852]: _type = "Task" [ 952.202420] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.210997] env[61852]: DEBUG oslo_vmware.api [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52eda882-8d55-6d88-e941-3332b9e019f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.642792] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "ba863c60-444a-4959-8f8f-87b4952d2872" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.643097] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "ba863c60-444a-4959-8f8f-87b4952d2872" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.643328] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "ba863c60-444a-4959-8f8f-87b4952d2872-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.643523] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "ba863c60-444a-4959-8f8f-87b4952d2872-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.643730] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "ba863c60-444a-4959-8f8f-87b4952d2872-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.646117] env[61852]: INFO nova.compute.manager [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Terminating instance [ 952.647948] env[61852]: DEBUG nova.compute.manager [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 952.648171] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 952.649006] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddd95ec2-6c7e-4000-910c-b89cd3d99e1d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.657259] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 952.657507] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3f79b8a6-9663-4c83-a36c-450f542ab6f6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.663946] env[61852]: DEBUG oslo_vmware.api [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 952.663946] env[61852]: value = "task-1293214" [ 952.663946] env[61852]: _type = "Task" [ 952.663946] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.671236] env[61852]: DEBUG oslo_vmware.api [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293214, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.712908] env[61852]: DEBUG oslo_vmware.api [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52eda882-8d55-6d88-e941-3332b9e019f4, 'name': SearchDatastore_Task, 'duration_secs': 0.011998} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.713119] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 952.713281] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] fb75509e-3cbf-406e-ad2d-aeb51a68295d/fb75509e-3cbf-406e-ad2d-aeb51a68295d.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 952.713545] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-14f2f30f-dc8f-4c22-9003-0ef1397d94b7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.720101] env[61852]: DEBUG oslo_vmware.api [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 952.720101] env[61852]: value = "task-1293215" [ 952.720101] env[61852]: _type = "Task" [ 952.720101] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.727370] env[61852]: DEBUG oslo_vmware.api [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293215, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.781216] env[61852]: DEBUG nova.compute.manager [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Start spawning the instance on the hypervisor. 
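
The CopyVirtualDisk_Task entries above (for instance fb75509e) are the cache-hit spawn path: the image already sits in devstack-image-cache_base, so the driver only copies the cached VMDK into the new instance's directory. A hedged sketch of that call; `session` is an oslo.vmware session as sketched earlier and `dc_moref` is a placeholder datacenter moref:

    # Copy the cached image VMDK to the instance directory, then wait.
    src = ('[datastore1] devstack-image-cache_base/'
           '90fd8f39-16b3-43e0-a682-0ec131005e31/'
           '90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk')
    dst = ('[datastore1] fb75509e-3cbf-406e-ad2d-aeb51a68295d/'
           'fb75509e-3cbf-406e-ad2d-aeb51a68295d.vmdk')

    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName=src, sourceDatacenter=dc_moref,
        destName=dst, destDatacenter=dc_moref)
    session.wait_for_task(task)
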
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 952.809949] env[61852]: DEBUG nova.virt.hardware [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 952.810217] env[61852]: DEBUG nova.virt.hardware [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 952.810383] env[61852]: DEBUG nova.virt.hardware [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 952.810574] env[61852]: DEBUG nova.virt.hardware [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 952.810728] env[61852]: DEBUG nova.virt.hardware [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 952.810879] env[61852]: DEBUG nova.virt.hardware [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 952.811100] env[61852]: DEBUG nova.virt.hardware [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 952.811270] env[61852]: DEBUG nova.virt.hardware [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 952.811435] 
env[61852]: DEBUG nova.virt.hardware [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 952.811601] env[61852]: DEBUG nova.virt.hardware [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 952.811808] env[61852]: DEBUG nova.virt.hardware [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 952.813102] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0987d053-4389-40e7-b47f-dfdeb61d7ee0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.820783] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04fdc016-9ecb-4975-a694-7a5dd15e16f3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.925434] env[61852]: DEBUG nova.compute.manager [req-161f3e89-4390-4188-ac92-7331a3fbdfbb req-1021108c-62b1-48c1-ab69-9e54fdb20733 service nova] [instance: df332116-2ae3-4e51-99b0-108921470959] Received event network-vif-plugged-16ad03e7-b72d-4cdd-8da7-5314a7cad855 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 952.925715] env[61852]: DEBUG oslo_concurrency.lockutils [req-161f3e89-4390-4188-ac92-7331a3fbdfbb req-1021108c-62b1-48c1-ab69-9e54fdb20733 service nova] Acquiring lock "df332116-2ae3-4e51-99b0-108921470959-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.925950] env[61852]: DEBUG oslo_concurrency.lockutils [req-161f3e89-4390-4188-ac92-7331a3fbdfbb req-1021108c-62b1-48c1-ab69-9e54fdb20733 service nova] Lock "df332116-2ae3-4e51-99b0-108921470959-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.926173] env[61852]: DEBUG oslo_concurrency.lockutils [req-161f3e89-4390-4188-ac92-7331a3fbdfbb req-1021108c-62b1-48c1-ab69-9e54fdb20733 service nova] Lock "df332116-2ae3-4e51-99b0-108921470959-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.926358] env[61852]: DEBUG nova.compute.manager [req-161f3e89-4390-4188-ac92-7331a3fbdfbb req-1021108c-62b1-48c1-ab69-9e54fdb20733 service nova] [instance: df332116-2ae3-4e51-99b0-108921470959] No waiting events found dispatching network-vif-plugged-16ad03e7-b72d-4cdd-8da7-5314a7cad855 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 952.926593] env[61852]: WARNING
nova.compute.manager [req-161f3e89-4390-4188-ac92-7331a3fbdfbb req-1021108c-62b1-48c1-ab69-9e54fdb20733 service nova] [instance: df332116-2ae3-4e51-99b0-108921470959] Received unexpected event network-vif-plugged-16ad03e7-b72d-4cdd-8da7-5314a7cad855 for instance with vm_state building and task_state spawning. [ 952.928893] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccdb8309-cd0e-4a4b-b179-5e71cfbf8ea4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.938941] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c369106d-ebc5-491c-91e5-a716295ca43e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.972734] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-117ef787-e658-49f2-a366-84c4faa3c31f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.980686] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0811b378-6434-4c5b-8c5c-5d826ec0551f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.994810] env[61852]: DEBUG nova.compute.provider_tree [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 953.174715] env[61852]: DEBUG oslo_vmware.api [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293214, 'name': PowerOffVM_Task, 'duration_secs': 0.149444} completed successfully. 
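
The nova.virt.hardware walk a few entries back shows how the driver lands on a 1-socket/1-core/1-thread layout: flavor and image express no limits (0 meaning unset), the maxima default to 65536, and every factorization of the vCPU count inside those maxima is a candidate topology. A toy re-implementation of that enumeration, not Nova's actual code:

    # Toy version of the search logged by _get_possible_cpu_topologies:
    # enumerate sockets*cores*threads == vcpus within the maxima.
    def possible_topologies(vcpus, max_sockets=65536,
                            max_cores=65536, max_threads=65536):
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    yield (sockets, cores, threads)

    # One vCPU admits exactly one topology, matching the log:
    print(list(possible_topologies(1)))  # [(1, 1, 1)]
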
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.175155] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 953.175343] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 953.175763] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6b9280ab-8c77-4db3-b67e-ef57be0d7e6b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.229548] env[61852]: DEBUG oslo_vmware.api [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293215, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.250585] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 953.250817] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 953.251010] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Deleting the datastore file [datastore1] ba863c60-444a-4959-8f8f-87b4952d2872 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 953.251295] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8a242121-b2e9-4faf-8c92-6eb98ad1d7f6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.257964] env[61852]: DEBUG oslo_vmware.api [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for the task: (returnval){ [ 953.257964] env[61852]: value = "task-1293217" [ 953.257964] env[61852]: _type = "Task" [ 953.257964] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.265819] env[61852]: DEBUG oslo_vmware.api [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293217, 'name': DeleteDatastoreFile_Task} progress is 0%. 
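
Interleaved with the spawn, the teardown of ba863c60 follows the vmwareapi driver's fixed order: power the VM off, UnregisterVM (synchronous, hence no task polling for it in the log), then delete the instance directory from the datastore. Condensed into the underlying calls, with the same placeholder session and morefs as the earlier sketches:

    # Power off, unregister, then remove the on-datastore directory.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_moref)
    session.wait_for_task(task)

    session.invoke_api(session.vim, 'UnregisterVM', vm_moref)

    file_mgr = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_mgr,
        name='[datastore1] ba863c60-444a-4959-8f8f-87b4952d2872',
        datacenter=dc_moref)
    session.wait_for_task(task)
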
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.508161] env[61852]: DEBUG nova.network.neutron [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Successfully updated port: 16ad03e7-b72d-4cdd-8da7-5314a7cad855 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 953.524675] env[61852]: DEBUG nova.scheduler.client.report [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Updated inventory for provider f818062c-7b17-4bd0-94af-192a674543c3 with generation 112 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 953.525013] env[61852]: DEBUG nova.compute.provider_tree [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Updating resource provider f818062c-7b17-4bd0-94af-192a674543c3 generation from 112 to 113 during operation: update_inventory {{(pid=61852) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 953.525269] env[61852]: DEBUG nova.compute.provider_tree [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 953.530990] env[61852]: DEBUG nova.compute.manager [req-421cecab-93a7-43b5-a24e-99de331b696e req-57761dcc-b92e-4b5b-adb0-262db5e876dc service nova] [instance: df332116-2ae3-4e51-99b0-108921470959] Received event network-changed-16ad03e7-b72d-4cdd-8da7-5314a7cad855 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 953.530990] env[61852]: DEBUG nova.compute.manager [req-421cecab-93a7-43b5-a24e-99de331b696e req-57761dcc-b92e-4b5b-adb0-262db5e876dc service nova] [instance: df332116-2ae3-4e51-99b0-108921470959] Refreshing instance network info cache due to event network-changed-16ad03e7-b72d-4cdd-8da7-5314a7cad855. 
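
The inventory dicts the report client logs here are the same body that placement accepts, keyed by resource class, with the provider generation serving as an optimistic-concurrency token (note the 112 -> 113 bump once the update lands). Reconstructing the payload by hand, values copied from the log:

    # Shape of a placement inventory update; a stale generation is
    # rejected by placement, which forces a refresh-and-retry.
    payload = {
        'resource_provider_generation': 112,
        'inventories': {
            'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1,
                     'max_unit': 16, 'step_size': 1,
                     'allocation_ratio': 4.0},
            'MEMORY_MB': {'total': 196590, 'reserved': 512,
                          'min_unit': 1, 'max_unit': 65530,
                          'step_size': 1, 'allocation_ratio': 1.0},
            'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1,
                        'max_unit': 138, 'step_size': 1,
                        'allocation_ratio': 1.0},
        },
    }
    # PUT /resource_providers/f818062c-7b17-4bd0-94af-192a674543c3/inventories
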
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 953.530990] env[61852]: DEBUG oslo_concurrency.lockutils [req-421cecab-93a7-43b5-a24e-99de331b696e req-57761dcc-b92e-4b5b-adb0-262db5e876dc service nova] Acquiring lock "refresh_cache-df332116-2ae3-4e51-99b0-108921470959" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 953.530990] env[61852]: DEBUG oslo_concurrency.lockutils [req-421cecab-93a7-43b5-a24e-99de331b696e req-57761dcc-b92e-4b5b-adb0-262db5e876dc service nova] Acquired lock "refresh_cache-df332116-2ae3-4e51-99b0-108921470959" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.531166] env[61852]: DEBUG nova.network.neutron [req-421cecab-93a7-43b5-a24e-99de331b696e req-57761dcc-b92e-4b5b-adb0-262db5e876dc service nova] [instance: df332116-2ae3-4e51-99b0-108921470959] Refreshing network info cache for port 16ad03e7-b72d-4cdd-8da7-5314a7cad855 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 953.732186] env[61852]: DEBUG oslo_vmware.api [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293215, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.512008} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.732458] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] fb75509e-3cbf-406e-ad2d-aeb51a68295d/fb75509e-3cbf-406e-ad2d-aeb51a68295d.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 953.732655] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 953.732910] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-750e6c00-1aec-4965-9dcd-21a0ee8bd88c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.741021] env[61852]: DEBUG oslo_vmware.api [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 953.741021] env[61852]: value = "task-1293218" [ 953.741021] env[61852]: _type = "Task" [ 953.741021] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.748946] env[61852]: DEBUG oslo_vmware.api [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293218, 'name': ExtendVirtualDisk_Task} progress is 0%. 
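
"Extending root virtual disk to 1048576" is the m1.nano flavor's root_gb=1 expressed in KB: after the sparse image copy, the driver grows the root VMDK to flavor size. Sketch of the call, placeholders as before:

    # Grow the copied root disk to the flavor size (1 GiB = 1048576 KB).
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
        name=('[datastore1] fb75509e-3cbf-406e-ad2d-aeb51a68295d/'
              'fb75509e-3cbf-406e-ad2d-aeb51a68295d.vmdk'),
        datacenter=dc_moref, newCapacityKb=1048576, eagerZero=False)
    session.wait_for_task(task)
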
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.766309] env[61852]: DEBUG oslo_vmware.api [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Task: {'id': task-1293217, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.487675} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.766587] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 953.766824] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 953.767565] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 953.767565] env[61852]: INFO nova.compute.manager [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Took 1.12 seconds to destroy the instance on the hypervisor. [ 953.767565] env[61852]: DEBUG oslo.service.loopingcall [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
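
That "Waiting for function ... to return" line is oslo.service's looping-call machinery driving the retried network deallocation. The primitive in miniature, runnable as-is (Nova's call here uses a backoff variant of the same loop):

    from oslo_service import loopingcall

    attempts = {'n': 0}

    def _deallocate_with_retries():
        attempts['n'] += 1
        if attempts['n'] < 3:      # pretend the first tries fail
            return                 # loop again after the interval
        raise loopingcall.LoopingCallDone(retvalue='deallocated')

    # start() returns an event; wait() blocks until the loop body
    # raises LoopingCallDone and yields its retvalue.
    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    print(timer.start(interval=0.1).wait())  # -> 'deallocated'
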
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 953.767698] env[61852]: DEBUG nova.compute.manager [-] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 953.767786] env[61852]: DEBUG nova.network.neutron [-] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 954.011684] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "refresh_cache-df332116-2ae3-4e51-99b0-108921470959" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 954.033090] env[61852]: DEBUG oslo_concurrency.lockutils [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.273s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.036825] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.934s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 954.037084] env[61852]: DEBUG nova.objects.instance [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lazy-loading 'resources' on Instance uuid 561d33d0-cad5-48ae-bd32-5de2220c5283 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 954.063935] env[61852]: DEBUG nova.network.neutron [req-421cecab-93a7-43b5-a24e-99de331b696e req-57761dcc-b92e-4b5b-adb0-262db5e876dc service nova] [instance: df332116-2ae3-4e51-99b0-108921470959] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 954.067026] env[61852]: INFO nova.network.neutron [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Updating port 83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 954.139260] env[61852]: DEBUG nova.network.neutron [req-421cecab-93a7-43b5-a24e-99de331b696e req-57761dcc-b92e-4b5b-adb0-262db5e876dc service nova] [instance: df332116-2ae3-4e51-99b0-108921470959] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.252808] env[61852]: DEBUG oslo_vmware.api [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293218, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071624} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.253168] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 954.253987] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e90b10ef-af50-47de-a88f-b5aa0d939057 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.278254] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] fb75509e-3cbf-406e-ad2d-aeb51a68295d/fb75509e-3cbf-406e-ad2d-aeb51a68295d.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 954.278927] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d958a647-f99a-4700-a899-bd3bc5545448 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.307424] env[61852]: DEBUG oslo_vmware.api [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 954.307424] env[61852]: value = "task-1293219" [ 954.307424] env[61852]: _type = "Task" [ 954.307424] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.316115] env[61852]: DEBUG oslo_vmware.api [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293219, 'name': ReconfigVM_Task} progress is 6%. 
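
Attaching the copied disk (the "Reconfiguring VM instance instance-0000005b" entries) is a single ReconfigVM_Task carrying a device-change list. A sketch of the spec construction via the suds client factory that oslo.vmware exposes; morefs and device keys are placeholders:

    # Build a minimal add-disk device change and reconfigure the VM.
    cf = session.vim.client.factory
    backing = cf.create('ns0:VirtualDiskFlatVer2BackingInfo')
    backing.fileName = ('[datastore1] fb75509e-3cbf-406e-ad2d-aeb51a68295d/'
                        'fb75509e-3cbf-406e-ad2d-aeb51a68295d.vmdk')
    backing.diskMode = 'persistent'

    disk = cf.create('ns0:VirtualDisk')
    disk.backing = backing
    disk.controllerKey = 100   # placeholder SCSI controller key
    disk.unitNumber = 0
    disk.key = -100            # negative key: assigned on reconfigure

    change = cf.create('ns0:VirtualDeviceConfigSpec')
    change.operation = 'add'
    change.device = disk

    spec = cf.create('ns0:VirtualMachineConfigSpec')
    spec.deviceChange = [change]

    task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                              vm_moref, spec=spec)
    session.wait_for_task(task)
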
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.505129] env[61852]: DEBUG nova.network.neutron [-] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.640874] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2effcdf1-ea6c-478a-b5dd-cf8b58a0e7aa {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.643643] env[61852]: DEBUG oslo_concurrency.lockutils [req-421cecab-93a7-43b5-a24e-99de331b696e req-57761dcc-b92e-4b5b-adb0-262db5e876dc service nova] Releasing lock "refresh_cache-df332116-2ae3-4e51-99b0-108921470959" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 954.643980] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired lock "refresh_cache-df332116-2ae3-4e51-99b0-108921470959" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.644158] env[61852]: DEBUG nova.network.neutron [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 954.650146] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f233a9fc-91bc-4cc8-94e2-74430b641849 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.681602] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9ef1b32-eec2-4bf3-a362-ea6c18013eb2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.688775] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22deb756-8e90-46e6-a671-a0de457769be {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.702168] env[61852]: DEBUG nova.compute.provider_tree [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 954.817748] env[61852]: DEBUG oslo_vmware.api [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293219, 'name': ReconfigVM_Task, 'duration_secs': 0.290405} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.818087] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Reconfigured VM instance instance-0000005b to attach disk [datastore1] fb75509e-3cbf-406e-ad2d-aeb51a68295d/fb75509e-3cbf-406e-ad2d-aeb51a68295d.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 954.818751] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b5e4d3ca-3593-45cd-b777-e5e9cebd89b6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.824376] env[61852]: DEBUG oslo_vmware.api [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 954.824376] env[61852]: value = "task-1293220" [ 954.824376] env[61852]: _type = "Task" [ 954.824376] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.835878] env[61852]: DEBUG oslo_vmware.api [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293220, 'name': Rename_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.935731] env[61852]: DEBUG oslo_concurrency.lockutils [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "interface-4623565b-cd36-498c-a0e9-c3b1c6ef479b-825f3034-375c-417d-9d76-971f3239ff59" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 954.935731] env[61852]: DEBUG oslo_concurrency.lockutils [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "interface-4623565b-cd36-498c-a0e9-c3b1c6ef479b-825f3034-375c-417d-9d76-971f3239ff59" acquired by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 954.936233] env[61852]: DEBUG nova.objects.instance [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lazy-loading 'flavor' on Instance uuid 4623565b-cd36-498c-a0e9-c3b1c6ef479b {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 955.007561] env[61852]: INFO nova.compute.manager [-] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Took 1.24 seconds to deallocate network for instance. [ 955.173785] env[61852]: DEBUG nova.network.neutron [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Instance cache missing network info.
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 955.231523] env[61852]: DEBUG nova.scheduler.client.report [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Updated inventory for provider f818062c-7b17-4bd0-94af-192a674543c3 with generation 113 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 955.231797] env[61852]: DEBUG nova.compute.provider_tree [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Updating resource provider f818062c-7b17-4bd0-94af-192a674543c3 generation from 113 to 114 during operation: update_inventory {{(pid=61852) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 955.231979] env[61852]: DEBUG nova.compute.provider_tree [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 955.304709] env[61852]: DEBUG nova.network.neutron [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Updating instance_info_cache with network_info: [{"id": "16ad03e7-b72d-4cdd-8da7-5314a7cad855", "address": "fa:16:3e:06:57:79", "network": {"id": "37c975fc-d484-4e07-82b4-dc10db3dab61", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2132613748-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14a017ea2b084ae0ad2994dda7809c7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16ad03e7-b7", "ovs_interfaceid": "16ad03e7-b72d-4cdd-8da7-5314a7cad855", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 955.334256] env[61852]: DEBUG oslo_vmware.api [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293220, 'name': Rename_Task, 'duration_secs': 0.131837} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.334551] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 955.334791] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-abea4021-3514-4129-80ff-6350af8d95aa {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.341095] env[61852]: DEBUG oslo_vmware.api [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 955.341095] env[61852]: value = "task-1293221" [ 955.341095] env[61852]: _type = "Task" [ 955.341095] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.348166] env[61852]: DEBUG oslo_vmware.api [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293221, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.513796] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.521028] env[61852]: DEBUG oslo_concurrency.lockutils [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquiring lock "refresh_cache-8d8679db-eb9d-45c1-b053-70378f58e273" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 955.521207] env[61852]: DEBUG oslo_concurrency.lockutils [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquired lock "refresh_cache-8d8679db-eb9d-45c1-b053-70378f58e273" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.521388] env[61852]: DEBUG nova.network.neutron [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 955.559027] env[61852]: DEBUG nova.compute.manager [req-12448ae0-8f6c-40f9-937f-4cac6675181f req-1c0d770a-63ff-485b-9ef2-42f7ac540a08 service nova] 
[instance: ba863c60-444a-4959-8f8f-87b4952d2872] Received event network-vif-deleted-5b69df93-12bd-4374-9aa2-76e5c7e7ddb9 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 955.559256] env[61852]: DEBUG nova.compute.manager [req-12448ae0-8f6c-40f9-937f-4cac6675181f req-1c0d770a-63ff-485b-9ef2-42f7ac540a08 service nova] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Received event network-vif-plugged-83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 955.559471] env[61852]: DEBUG oslo_concurrency.lockutils [req-12448ae0-8f6c-40f9-937f-4cac6675181f req-1c0d770a-63ff-485b-9ef2-42f7ac540a08 service nova] Acquiring lock "8d8679db-eb9d-45c1-b053-70378f58e273-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.559698] env[61852]: DEBUG oslo_concurrency.lockutils [req-12448ae0-8f6c-40f9-937f-4cac6675181f req-1c0d770a-63ff-485b-9ef2-42f7ac540a08 service nova] Lock "8d8679db-eb9d-45c1-b053-70378f58e273-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.559878] env[61852]: DEBUG oslo_concurrency.lockutils [req-12448ae0-8f6c-40f9-937f-4cac6675181f req-1c0d770a-63ff-485b-9ef2-42f7ac540a08 service nova] Lock "8d8679db-eb9d-45c1-b053-70378f58e273-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.560091] env[61852]: DEBUG nova.compute.manager [req-12448ae0-8f6c-40f9-937f-4cac6675181f req-1c0d770a-63ff-485b-9ef2-42f7ac540a08 service nova] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] No waiting events found dispatching network-vif-plugged-83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 955.560302] env[61852]: WARNING nova.compute.manager [req-12448ae0-8f6c-40f9-937f-4cac6675181f req-1c0d770a-63ff-485b-9ef2-42f7ac540a08 service nova] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Received unexpected event network-vif-plugged-83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b for instance with vm_state shelved_offloaded and task_state spawning. [ 955.560473] env[61852]: DEBUG nova.compute.manager [req-12448ae0-8f6c-40f9-937f-4cac6675181f req-1c0d770a-63ff-485b-9ef2-42f7ac540a08 service nova] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Received event network-changed-83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 955.560632] env[61852]: DEBUG nova.compute.manager [req-12448ae0-8f6c-40f9-937f-4cac6675181f req-1c0d770a-63ff-485b-9ef2-42f7ac540a08 service nova] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Refreshing instance network info cache due to event network-changed-83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b.
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 955.560809] env[61852]: DEBUG oslo_concurrency.lockutils [req-12448ae0-8f6c-40f9-937f-4cac6675181f req-1c0d770a-63ff-485b-9ef2-42f7ac540a08 service nova] Acquiring lock "refresh_cache-8d8679db-eb9d-45c1-b053-70378f58e273" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 955.561704] env[61852]: DEBUG nova.objects.instance [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lazy-loading 'pci_requests' on Instance uuid 4623565b-cd36-498c-a0e9-c3b1c6ef479b {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 955.737379] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.700s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.740048] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.226s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.740433] env[61852]: DEBUG nova.objects.instance [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lazy-loading 'resources' on Instance uuid ba863c60-444a-4959-8f8f-87b4952d2872 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 955.756694] env[61852]: INFO nova.scheduler.client.report [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Deleted allocations for instance 561d33d0-cad5-48ae-bd32-5de2220c5283 [ 955.807479] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Releasing lock "refresh_cache-df332116-2ae3-4e51-99b0-108921470959" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 955.807814] env[61852]: DEBUG nova.compute.manager [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Instance network_info: |[{"id": "16ad03e7-b72d-4cdd-8da7-5314a7cad855", "address": "fa:16:3e:06:57:79", "network": {"id": "37c975fc-d484-4e07-82b4-dc10db3dab61", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2132613748-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14a017ea2b084ae0ad2994dda7809c7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, 
"type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16ad03e7-b7", "ovs_interfaceid": "16ad03e7-b72d-4cdd-8da7-5314a7cad855", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 955.808293] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:57:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51ae336c-12cf-406a-b1ca-54e9ce553b3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '16ad03e7-b72d-4cdd-8da7-5314a7cad855', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 955.816800] env[61852]: DEBUG oslo.service.loopingcall [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 955.817385] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df332116-2ae3-4e51-99b0-108921470959] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 955.817656] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c10cc137-6ea8-4b3f-809c-c009d3feccaa {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.837705] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 955.837705] env[61852]: value = "task-1293222" [ 955.837705] env[61852]: _type = "Task" [ 955.837705] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.847407] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293222, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.852211] env[61852]: DEBUG oslo_vmware.api [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293221, 'name': PowerOnVM_Task, 'duration_secs': 0.411431} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.852481] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 955.852691] env[61852]: INFO nova.compute.manager [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Took 8.36 seconds to spawn the instance on the hypervisor. [ 955.852887] env[61852]: DEBUG nova.compute.manager [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 955.853655] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9259e272-1606-46ce-b3c7-6709baffeb71 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.064266] env[61852]: DEBUG nova.objects.base [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Object Instance<4623565b-cd36-498c-a0e9-c3b1c6ef479b> lazy-loaded attributes: flavor,pci_requests {{(pid=61852) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 956.064505] env[61852]: DEBUG nova.network.neutron [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 956.130066] env[61852]: DEBUG nova.policy [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0f04d129452d4eb79514c52a6972af0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e93a6965a6884292bc56b01f7d54a622', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 956.264642] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e0a81040-2d17-43cb-8130-28f56c4f1976 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "561d33d0-cad5-48ae-bd32-5de2220c5283" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 9.055s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.313574] env[61852]: DEBUG nova.network.neutron [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Updating
instance_info_cache with network_info: [{"id": "83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b", "address": "fa:16:3e:82:81:71", "network": {"id": "d8dfb48f-1d4c-40ca-a2c0-27b808516657", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-603860889-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fdd2d4aeb954b6fae049090b32f657b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d48f0ef6-34e5-44d4-8baf-4470ed96ce73", "external-id": "nsx-vlan-transportzone-316", "segmentation_id": 316, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83bdd4e5-89", "ovs_interfaceid": "83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.351852] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293222, 'name': CreateVM_Task, 'duration_secs': 0.334157} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.352040] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df332116-2ae3-4e51-99b0-108921470959] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 956.352703] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 956.352873] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.353219] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 956.353476] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80f1f47f-5215-4599-ab02-3b2f204fcd5c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.361630] env[61852]: DEBUG oslo_vmware.api [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 956.361630] 
env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52a3516c-46af-86ac-4223-1e5c5a44ab9d" [ 956.361630] env[61852]: _type = "Task" [ 956.361630] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.373793] env[61852]: DEBUG oslo_vmware.api [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52a3516c-46af-86ac-4223-1e5c5a44ab9d, 'name': SearchDatastore_Task, 'duration_secs': 0.00887} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.374512] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d9152ac-ee28-4f41-98f4-d27a27e26179 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.377134] env[61852]: INFO nova.compute.manager [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Took 13.12 seconds to build instance. [ 956.378081] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 956.378258] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 956.378492] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 956.378640] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.378820] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 956.379235] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9feb3def-99cf-46fd-9276-5ed1b4088a21 {{(pid=61852) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.385410] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebd3e04b-bd03-42a6-968a-681cd62bc658 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.390174] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 956.390355] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 956.391305] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-568a38df-b67d-4059-b0f1-24d3138dff51 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.420076] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58effe30-0202-4571-839c-307354c9c32c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.424272] env[61852]: DEBUG oslo_vmware.api [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 956.424272] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5264501a-52e9-12bf-f3fd-abc15e7f40a8" [ 956.424272] env[61852]: _type = "Task" [ 956.424272] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.432304] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf2e855d-f0da-4cb0-92e5-323b73983963 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.438406] env[61852]: DEBUG oslo_vmware.api [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5264501a-52e9-12bf-f3fd-abc15e7f40a8, 'name': SearchDatastore_Task, 'duration_secs': 0.008647} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.439478] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25a6db4d-09db-4641-a6d9-7f493e47fcb5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.449208] env[61852]: DEBUG nova.compute.provider_tree [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 956.454339] env[61852]: DEBUG oslo_vmware.api [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 956.454339] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]526cd193-2660-ac96-c9c6-7cdfb2910c76" [ 956.454339] env[61852]: _type = "Task" [ 956.454339] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.460464] env[61852]: DEBUG oslo_vmware.api [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]526cd193-2660-ac96-c9c6-7cdfb2910c76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.816945] env[61852]: DEBUG oslo_concurrency.lockutils [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Releasing lock "refresh_cache-8d8679db-eb9d-45c1-b053-70378f58e273" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 956.819337] env[61852]: DEBUG oslo_concurrency.lockutils [req-12448ae0-8f6c-40f9-937f-4cac6675181f req-1c0d770a-63ff-485b-9ef2-42f7ac540a08 service nova] Acquired lock "refresh_cache-8d8679db-eb9d-45c1-b053-70378f58e273" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.819530] env[61852]: DEBUG nova.network.neutron [req-12448ae0-8f6c-40f9-937f-4cac6675181f req-1c0d770a-63ff-485b-9ef2-42f7ac540a08 service nova] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Refreshing network info cache for port 83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 956.847865] env[61852]: DEBUG nova.virt.hardware [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='bf8d4982837fabd01386cc7fb6a2b3c0',container_format='bare',created_at=2024-10-15T17:27:07Z,direct_url=,disk_format='vmdk',id=f5e5a587-44f8-4b6c-b924-cca27583fcf9,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-37378641-shelved',owner='1fdd2d4aeb954b6fae049090b32f657b',properties=ImageMetaProps,protected=,size=31666688,status='active',tags=,updated_at=2024-10-15T17:27:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 956.848069] env[61852]: DEBUG nova.virt.hardware [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 956.848258] env[61852]: DEBUG nova.virt.hardware [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 956.848512] env[61852]: DEBUG nova.virt.hardware [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 956.848594] env[61852]: DEBUG nova.virt.hardware [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 956.848744] env[61852]: DEBUG nova.virt.hardware [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 956.849025] env[61852]: DEBUG nova.virt.hardware [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 956.849127] env[61852]: DEBUG nova.virt.hardware [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 956.849304] env[61852]: DEBUG nova.virt.hardware [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 956.849471] env[61852]: DEBUG nova.virt.hardware [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 956.849648] 
env[61852]: DEBUG nova.virt.hardware [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 956.850746] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-390f7da2-285a-474e-bdc8-0b4722f76670 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.858988] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e5a45cc-bd86-4d4c-ad64-8e9cf5769600 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.874389] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:81:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd48f0ef6-34e5-44d4-8baf-4470ed96ce73', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 956.881913] env[61852]: DEBUG oslo.service.loopingcall [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 956.882054] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e7339438-bab9-4dd6-ba07-63791edc565b tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "fb75509e-3cbf-406e-ad2d-aeb51a68295d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.633s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.882245] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 956.882470] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0b31bf02-397f-4d17-bda5-b0997c85d7d4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.901718] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 956.901718] env[61852]: value = "task-1293223" [ 956.901718] env[61852]: _type = "Task" [ 956.901718] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.909662] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293223, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.952332] env[61852]: DEBUG nova.scheduler.client.report [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 956.966658] env[61852]: DEBUG oslo_vmware.api [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]526cd193-2660-ac96-c9c6-7cdfb2910c76, 'name': SearchDatastore_Task, 'duration_secs': 0.008265} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.967096] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 956.967361] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] df332116-2ae3-4e51-99b0-108921470959/df332116-2ae3-4e51-99b0-108921470959.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 956.967630] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-36af0443-c51b-49da-b234-1ffd5242deb2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.974674] env[61852]: DEBUG oslo_vmware.api [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 956.974674] env[61852]: value = "task-1293224" [ 956.974674] env[61852]: _type = "Task" [ 956.974674] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.982701] env[61852]: DEBUG oslo_vmware.api [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293224, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.065693] env[61852]: DEBUG oslo_concurrency.lockutils [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "cc5e0467-2960-43a1-bd7b-a528d5788028" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.066063] env[61852]: DEBUG oslo_concurrency.lockutils [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "cc5e0467-2960-43a1-bd7b-a528d5788028" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.066342] env[61852]: DEBUG oslo_concurrency.lockutils [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "cc5e0467-2960-43a1-bd7b-a528d5788028-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.066571] env[61852]: DEBUG oslo_concurrency.lockutils [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "cc5e0467-2960-43a1-bd7b-a528d5788028-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.066802] env[61852]: DEBUG oslo_concurrency.lockutils [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "cc5e0467-2960-43a1-bd7b-a528d5788028-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.071649] env[61852]: INFO nova.compute.manager [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Terminating instance [ 957.073426] env[61852]: DEBUG nova.compute.manager [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 957.073625] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 957.074458] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a82b4c47-2bf9-47b5-a6a9-2113834db065 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.082530] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 957.082810] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-987c357c-b360-428e-a77a-5eff16d27fb2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.089436] env[61852]: DEBUG oslo_vmware.api [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 957.089436] env[61852]: value = "task-1293225" [ 957.089436] env[61852]: _type = "Task" [ 957.089436] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.097392] env[61852]: DEBUG oslo_vmware.api [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293225, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.412300] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293223, 'name': CreateVM_Task, 'duration_secs': 0.330436} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.412722] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 957.413296] env[61852]: DEBUG oslo_concurrency.lockutils [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f5e5a587-44f8-4b6c-b924-cca27583fcf9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 957.413492] env[61852]: DEBUG oslo_concurrency.lockutils [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f5e5a587-44f8-4b6c-b924-cca27583fcf9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.413919] env[61852]: DEBUG oslo_concurrency.lockutils [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f5e5a587-44f8-4b6c-b924-cca27583fcf9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 957.414265] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81eb14f8-2167-40ff-9b7a-42543356a55e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.419889] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){ [ 957.419889] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]526a702d-7155-914c-3fae-22e980c0190c" [ 957.419889] env[61852]: _type = "Task" [ 957.419889] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.439436] env[61852]: DEBUG oslo_concurrency.lockutils [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f5e5a587-44f8-4b6c-b924-cca27583fcf9" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 957.439716] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Processing image f5e5a587-44f8-4b6c-b924-cca27583fcf9 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 957.439971] env[61852]: DEBUG oslo_concurrency.lockutils [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f5e5a587-44f8-4b6c-b924-cca27583fcf9/f5e5a587-44f8-4b6c-b924-cca27583fcf9.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 957.440121] env[61852]: DEBUG oslo_concurrency.lockutils [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f5e5a587-44f8-4b6c-b924-cca27583fcf9/f5e5a587-44f8-4b6c-b924-cca27583fcf9.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.440307] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 957.440589] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ab41bfd4-d035-4d75-9e0c-e2067ae8c9c5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.448943] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 957.449182] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 957.449963] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08b1f903-5ad9-4bfb-8bb9-14e7bf16d1d9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.454893] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){ [ 957.454893] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52ac4e34-605d-3f33-cb34-a9a38408ae25" [ 957.454893] env[61852]: _type = "Task" [ 957.454893] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.460170] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.720s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.467292] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52ac4e34-605d-3f33-cb34-a9a38408ae25, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.483449] env[61852]: DEBUG oslo_vmware.api [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293224, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.499139} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.483684] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] df332116-2ae3-4e51-99b0-108921470959/df332116-2ae3-4e51-99b0-108921470959.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 957.483891] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 957.484257] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4abbbf3a-990d-44d2-8708-b229eea5535d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.488919] env[61852]: INFO nova.scheduler.client.report [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Deleted allocations for instance ba863c60-444a-4959-8f8f-87b4952d2872 [ 957.493916] env[61852]: DEBUG oslo_vmware.api [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 957.493916] env[61852]: value = "task-1293226" [ 957.493916] env[61852]: _type = "Task" [ 957.493916] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.503467] env[61852]: DEBUG oslo_vmware.api [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293226, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.598816] env[61852]: DEBUG oslo_vmware.api [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293225, 'name': PowerOffVM_Task, 'duration_secs': 0.263219} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.599093] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 957.599267] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 957.599519] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4b3c8868-7f30-414e-bd37-3d4c3f5e53a9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.631437] env[61852]: DEBUG nova.network.neutron [req-12448ae0-8f6c-40f9-937f-4cac6675181f req-1c0d770a-63ff-485b-9ef2-42f7ac540a08 service nova] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Updated VIF entry in instance network info cache for port 83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 957.632360] env[61852]: DEBUG nova.network.neutron [req-12448ae0-8f6c-40f9-937f-4cac6675181f req-1c0d770a-63ff-485b-9ef2-42f7ac540a08 service nova] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Updating instance_info_cache with network_info: [{"id": "83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b", "address": "fa:16:3e:82:81:71", "network": {"id": "d8dfb48f-1d4c-40ca-a2c0-27b808516657", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-603860889-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fdd2d4aeb954b6fae049090b32f657b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d48f0ef6-34e5-44d4-8baf-4470ed96ce73", "external-id": "nsx-vlan-transportzone-316", "segmentation_id": 316, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83bdd4e5-89", "ovs_interfaceid": "83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.658010] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 957.658268] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Deleting contents 
of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 957.658705] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Deleting the datastore file [datastore2] cc5e0467-2960-43a1-bd7b-a528d5788028 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 957.658981] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5faa6feb-b9a1-4548-9505-62c08754e908 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.665715] env[61852]: DEBUG oslo_vmware.api [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 957.665715] env[61852]: value = "task-1293228" [ 957.665715] env[61852]: _type = "Task" [ 957.665715] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.673995] env[61852]: DEBUG oslo_vmware.api [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293228, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.719453] env[61852]: DEBUG nova.compute.manager [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Stashing vm_state: active {{(pid=61852) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 957.744201] env[61852]: DEBUG nova.compute.manager [req-cda83ba8-b2a3-46cc-a522-40c6c16c59ea req-e82566a6-9bae-4df7-81a2-68e349ce2dc0 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Received event network-vif-plugged-825f3034-375c-417d-9d76-971f3239ff59 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 957.744456] env[61852]: DEBUG oslo_concurrency.lockutils [req-cda83ba8-b2a3-46cc-a522-40c6c16c59ea req-e82566a6-9bae-4df7-81a2-68e349ce2dc0 service nova] Acquiring lock "4623565b-cd36-498c-a0e9-c3b1c6ef479b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.744706] env[61852]: DEBUG oslo_concurrency.lockutils [req-cda83ba8-b2a3-46cc-a522-40c6c16c59ea req-e82566a6-9bae-4df7-81a2-68e349ce2dc0 service nova] Lock "4623565b-cd36-498c-a0e9-c3b1c6ef479b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.744915] env[61852]: DEBUG oslo_concurrency.lockutils [req-cda83ba8-b2a3-46cc-a522-40c6c16c59ea req-e82566a6-9bae-4df7-81a2-68e349ce2dc0 service nova] Lock "4623565b-cd36-498c-a0e9-c3b1c6ef479b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.745311] env[61852]: DEBUG nova.compute.manager 
[req-cda83ba8-b2a3-46cc-a522-40c6c16c59ea req-e82566a6-9bae-4df7-81a2-68e349ce2dc0 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] No waiting events found dispatching network-vif-plugged-825f3034-375c-417d-9d76-971f3239ff59 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 957.745452] env[61852]: WARNING nova.compute.manager [req-cda83ba8-b2a3-46cc-a522-40c6c16c59ea req-e82566a6-9bae-4df7-81a2-68e349ce2dc0 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Received unexpected event network-vif-plugged-825f3034-375c-417d-9d76-971f3239ff59 for instance with vm_state active and task_state None. [ 957.804832] env[61852]: DEBUG nova.network.neutron [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Successfully updated port: 825f3034-375c-417d-9d76-971f3239ff59 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 957.965031] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Preparing fetch location {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 957.965303] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Fetch image to [datastore1] OSTACK_IMG_b783dd0c-23e2-49cb-8458-5d8e4bddc85d/OSTACK_IMG_b783dd0c-23e2-49cb-8458-5d8e4bddc85d.vmdk {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 957.965490] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Downloading stream optimized image f5e5a587-44f8-4b6c-b924-cca27583fcf9 to [datastore1] OSTACK_IMG_b783dd0c-23e2-49cb-8458-5d8e4bddc85d/OSTACK_IMG_b783dd0c-23e2-49cb-8458-5d8e4bddc85d.vmdk on the data store datastore1 as vApp {{(pid=61852) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 957.965662] env[61852]: DEBUG nova.virt.vmwareapi.images [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Downloading image file data f5e5a587-44f8-4b6c-b924-cca27583fcf9 to the ESX as VM named 'OSTACK_IMG_b783dd0c-23e2-49cb-8458-5d8e4bddc85d' {{(pid=61852) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 958.010055] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0d16352c-7793-45f0-8e8b-b51143aa92b6 tempest-ImagesTestJSON-651191963 tempest-ImagesTestJSON-651191963-project-member] Lock "ba863c60-444a-4959-8f8f-87b4952d2872" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.367s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.016076] env[61852]: DEBUG oslo_vmware.api [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 
tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293226, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.057755} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.016279] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 958.017102] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-183fea71-d8ba-46b5-86f1-64f18fee95be {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.041533] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] df332116-2ae3-4e51-99b0-108921470959/df332116-2ae3-4e51-99b0-108921470959.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 958.043044] env[61852]: DEBUG oslo_vmware.rw_handles [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 958.043044] env[61852]: value = "resgroup-9" [ 958.043044] env[61852]: _type = "ResourcePool" [ 958.043044] env[61852]: }. {{(pid=61852) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 958.043289] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c588a1b0-bd05-4bb0-8cb7-e314de5d85bb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.058110] env[61852]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-2c225370-105e-4ef2-b61d-9b76e9218bc0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.079933] env[61852]: DEBUG oslo_vmware.api [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 958.079933] env[61852]: value = "task-1293229" [ 958.079933] env[61852]: _type = "Task" [ 958.079933] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.081424] env[61852]: DEBUG oslo_vmware.rw_handles [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lease: (returnval){ [ 958.081424] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d1431f-ba0a-993a-859a-ce4f667f8908" [ 958.081424] env[61852]: _type = "HttpNfcLease" [ 958.081424] env[61852]: } obtained for vApp import into resource pool (val){ [ 958.081424] env[61852]: value = "resgroup-9" [ 958.081424] env[61852]: _type = "ResourcePool" [ 958.081424] env[61852]: }. 
{{(pid=61852) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 958.081688] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the lease: (returnval){ [ 958.081688] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d1431f-ba0a-993a-859a-ce4f667f8908" [ 958.081688] env[61852]: _type = "HttpNfcLease" [ 958.081688] env[61852]: } to be ready. {{(pid=61852) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 958.093684] env[61852]: DEBUG oslo_vmware.api [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293229, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.093896] env[61852]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 958.093896] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d1431f-ba0a-993a-859a-ce4f667f8908" [ 958.093896] env[61852]: _type = "HttpNfcLease" [ 958.093896] env[61852]: } is initializing. {{(pid=61852) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 958.136416] env[61852]: DEBUG oslo_concurrency.lockutils [req-12448ae0-8f6c-40f9-937f-4cac6675181f req-1c0d770a-63ff-485b-9ef2-42f7ac540a08 service nova] Releasing lock "refresh_cache-8d8679db-eb9d-45c1-b053-70378f58e273" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.177765] env[61852]: DEBUG oslo_vmware.api [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293228, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134087} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.178066] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 958.178230] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Deleted contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 958.178417] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 958.178596] env[61852]: INFO nova.compute.manager [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Took 1.10 seconds to destroy the instance on the hypervisor. 
[ 958.178900] env[61852]: DEBUG oslo.service.loopingcall [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 958.179067] env[61852]: DEBUG nova.compute.manager [-] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 958.179210] env[61852]: DEBUG nova.network.neutron [-] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 958.241863] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.242224] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.307566] env[61852]: DEBUG oslo_concurrency.lockutils [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "refresh_cache-4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 958.307835] env[61852]: DEBUG oslo_concurrency.lockutils [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquired lock "refresh_cache-4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.308110] env[61852]: DEBUG nova.network.neutron [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 958.593566] env[61852]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 958.593566] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d1431f-ba0a-993a-859a-ce4f667f8908" [ 958.593566] env[61852]: _type = "HttpNfcLease" [ 958.593566] env[61852]: } is initializing. {{(pid=61852) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 958.597427] env[61852]: DEBUG oslo_vmware.api [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293229, 'name': ReconfigVM_Task, 'duration_secs': 0.306778} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.597686] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Reconfigured VM instance instance-0000005c to attach disk [datastore1] df332116-2ae3-4e51-99b0-108921470959/df332116-2ae3-4e51-99b0-108921470959.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 958.598341] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c5893fb7-9e9f-404c-8a00-2fc612f2dc54 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.604872] env[61852]: DEBUG oslo_vmware.api [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 958.604872] env[61852]: value = "task-1293231" [ 958.604872] env[61852]: _type = "Task" [ 958.604872] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.612384] env[61852]: DEBUG oslo_vmware.api [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293231, 'name': Rename_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.747423] env[61852]: INFO nova.compute.claims [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 958.860054] env[61852]: WARNING nova.network.neutron [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] d984a6fb-5f5f-4678-bc8a-3723c26f290a already exists in list: networks containing: ['d984a6fb-5f5f-4678-bc8a-3723c26f290a']. ignoring it [ 959.083974] env[61852]: DEBUG nova.network.neutron [-] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.095360] env[61852]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 959.095360] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d1431f-ba0a-993a-859a-ce4f667f8908" [ 959.095360] env[61852]: _type = "HttpNfcLease" [ 959.095360] env[61852]: } is initializing. {{(pid=61852) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 959.114351] env[61852]: DEBUG oslo_vmware.api [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293231, 'name': Rename_Task, 'duration_secs': 0.232019} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.116784] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 959.117078] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d81ec6d8-08f3-4f90-a345-c1e2779be0c7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.123290] env[61852]: DEBUG oslo_vmware.api [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 959.123290] env[61852]: value = "task-1293232" [ 959.123290] env[61852]: _type = "Task" [ 959.123290] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.134611] env[61852]: DEBUG oslo_vmware.api [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293232, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.254819] env[61852]: INFO nova.compute.resource_tracker [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Updating resource usage from migration a0ebf04b-b0d3-4993-a5ba-06cba9c38fe7 [ 959.349834] env[61852]: DEBUG nova.network.neutron [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Updating instance_info_cache with network_info: [{"id": "6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883", "address": "fa:16:3e:b2:2d:44", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ab757ae-eb", "ovs_interfaceid": "6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "825f3034-375c-417d-9d76-971f3239ff59", "address": "fa:16:3e:a6:a4:09", "network": {"id": 
"d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap825f3034-37", "ovs_interfaceid": "825f3034-375c-417d-9d76-971f3239ff59", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.387242] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2ce62b9-595c-4166-a176-f54eae77db9f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.395831] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc019500-65bb-4893-968a-0cb402a33962 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.430274] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da652f4a-cebc-4d04-b333-d3f5d20d4e52 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.438037] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-539f513b-a2a6-485a-93dc-c8f0293abf20 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.451830] env[61852]: DEBUG nova.compute.provider_tree [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 959.587136] env[61852]: INFO nova.compute.manager [-] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Took 1.41 seconds to deallocate network for instance. [ 959.599284] env[61852]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 959.599284] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d1431f-ba0a-993a-859a-ce4f667f8908" [ 959.599284] env[61852]: _type = "HttpNfcLease" [ 959.599284] env[61852]: } is initializing. 
{{(pid=61852) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 959.632829] env[61852]: DEBUG oslo_vmware.api [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293232, 'name': PowerOnVM_Task, 'duration_secs': 0.466828} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.633328] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 959.633570] env[61852]: INFO nova.compute.manager [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Took 6.85 seconds to spawn the instance on the hypervisor. [ 959.633759] env[61852]: DEBUG nova.compute.manager [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 959.634610] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3dfe4d2-00e1-4fbe-8c8f-98f0ef681dfa {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.772894] env[61852]: DEBUG nova.compute.manager [req-919f775e-432e-45f9-89b0-b04f482118a7 req-06e84cd4-4f6c-4f95-b5e1-f97404f83f89 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Received event network-changed-825f3034-375c-417d-9d76-971f3239ff59 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 959.773478] env[61852]: DEBUG nova.compute.manager [req-919f775e-432e-45f9-89b0-b04f482118a7 req-06e84cd4-4f6c-4f95-b5e1-f97404f83f89 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Refreshing instance network info cache due to event network-changed-825f3034-375c-417d-9d76-971f3239ff59. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 959.773478] env[61852]: DEBUG oslo_concurrency.lockutils [req-919f775e-432e-45f9-89b0-b04f482118a7 req-06e84cd4-4f6c-4f95-b5e1-f97404f83f89 service nova] Acquiring lock "refresh_cache-4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 959.852438] env[61852]: DEBUG oslo_concurrency.lockutils [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Releasing lock "refresh_cache-4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 959.853138] env[61852]: DEBUG oslo_concurrency.lockutils [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 959.853308] env[61852]: DEBUG oslo_concurrency.lockutils [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquired lock "4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.853591] env[61852]: DEBUG oslo_concurrency.lockutils [req-919f775e-432e-45f9-89b0-b04f482118a7 req-06e84cd4-4f6c-4f95-b5e1-f97404f83f89 service nova] Acquired lock "refresh_cache-4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.853777] env[61852]: DEBUG nova.network.neutron [req-919f775e-432e-45f9-89b0-b04f482118a7 req-06e84cd4-4f6c-4f95-b5e1-f97404f83f89 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Refreshing network info cache for port 825f3034-375c-417d-9d76-971f3239ff59 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 959.855563] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d6bc640-8b62-476e-bc46-569f2bcb9ddd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.872105] env[61852]: DEBUG nova.virt.hardware [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=<?>,container_format=<?>,created_at=<?>,direct_url=<?>,disk_format=<?>,id=<?>,min_disk=<?>,min_ram=<?>,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=<?>,status=<?>,tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 959.872346] env[61852]: DEBUG nova.virt.hardware [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 
tempest-AttachInterfacesTestJSON-673373864-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 959.872604] env[61852]: DEBUG nova.virt.hardware [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 959.872682] env[61852]: DEBUG nova.virt.hardware [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 959.872826] env[61852]: DEBUG nova.virt.hardware [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 959.872989] env[61852]: DEBUG nova.virt.hardware [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 959.873212] env[61852]: DEBUG nova.virt.hardware [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 959.873449] env[61852]: DEBUG nova.virt.hardware [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 959.873982] env[61852]: DEBUG nova.virt.hardware [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 959.873982] env[61852]: DEBUG nova.virt.hardware [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 959.873982] env[61852]: DEBUG nova.virt.hardware [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 959.880177] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Reconfiguring VM to attach interface {{(pid=61852) attach_interface 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 959.881077] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a2afb57d-ae1b-494c-bacc-f7637cd5e0fd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.898404] env[61852]: DEBUG oslo_vmware.api [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 959.898404] env[61852]: value = "task-1293233" [ 959.898404] env[61852]: _type = "Task" [ 959.898404] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.906162] env[61852]: DEBUG oslo_vmware.api [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293233, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.973165] env[61852]: ERROR nova.scheduler.client.report [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [req-683880fe-a39c-470f-b29e-6e3b8fee8367] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f818062c-7b17-4bd0-94af-192a674543c3. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-683880fe-a39c-470f-b29e-6e3b8fee8367"}]} [ 959.990541] env[61852]: DEBUG nova.scheduler.client.report [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Refreshing inventories for resource provider f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 960.005516] env[61852]: DEBUG nova.scheduler.client.report [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Updating ProviderTree inventory for provider f818062c-7b17-4bd0-94af-192a674543c3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 960.005865] env[61852]: DEBUG nova.compute.provider_tree [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 960.016729] env[61852]: DEBUG nova.scheduler.client.report [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Refreshing aggregate associations for resource provider f818062c-7b17-4bd0-94af-192a674543c3, aggregates: None {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 960.034313] env[61852]: DEBUG nova.scheduler.client.report [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Refreshing trait associations for resource provider f818062c-7b17-4bd0-94af-192a674543c3, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 960.096708] env[61852]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 960.096708] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d1431f-ba0a-993a-859a-ce4f667f8908" [ 960.096708] env[61852]: _type = "HttpNfcLease" [ 960.096708] env[61852]: } is initializing. 
{{(pid=61852) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 960.100613] env[61852]: DEBUG oslo_concurrency.lockutils [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 960.144624] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b71bd7d-caff-4dcb-9cbd-fe83c0d2d5de {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.156688] env[61852]: INFO nova.compute.manager [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Took 14.62 seconds to build instance. [ 960.159397] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0e9cabe-0f08-4357-9b85-4c97ba519d2a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.191288] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60cb1be9-bd49-4ac5-91f1-151da8db5ecc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.199573] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9a412ff-1eb8-402d-a4d8-0e0c58e427c9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.213766] env[61852]: DEBUG nova.compute.provider_tree [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 960.409735] env[61852]: DEBUG oslo_vmware.api [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293233, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.599701] env[61852]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 960.599701] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d1431f-ba0a-993a-859a-ce4f667f8908" [ 960.599701] env[61852]: _type = "HttpNfcLease" [ 960.599701] env[61852]: } is initializing. 
{{(pid=61852) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 960.659440] env[61852]: DEBUG oslo_concurrency.lockutils [None req-2c2bcd30-52cf-4863-9450-ffd81b513bb4 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "df332116-2ae3-4e51-99b0-108921470959" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 16.133s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.684989] env[61852]: DEBUG nova.network.neutron [req-919f775e-432e-45f9-89b0-b04f482118a7 req-06e84cd4-4f6c-4f95-b5e1-f97404f83f89 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Updated VIF entry in instance network info cache for port 825f3034-375c-417d-9d76-971f3239ff59. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 960.685535] env[61852]: DEBUG nova.network.neutron [req-919f775e-432e-45f9-89b0-b04f482118a7 req-06e84cd4-4f6c-4f95-b5e1-f97404f83f89 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Updating instance_info_cache with network_info: [{"id": "6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883", "address": "fa:16:3e:b2:2d:44", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ab757ae-eb", "ovs_interfaceid": "6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "825f3034-375c-417d-9d76-971f3239ff59", "address": "fa:16:3e:a6:a4:09", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap825f3034-37", "ovs_interfaceid": "825f3034-375c-417d-9d76-971f3239ff59", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.734350] env[61852]: ERROR nova.scheduler.client.report [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [req-178b14af-58b3-4c4b-930e-7e4f3dea4e98] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f818062c-7b17-4bd0-94af-192a674543c3. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-178b14af-58b3-4c4b-930e-7e4f3dea4e98"}]} [ 960.752538] env[61852]: DEBUG nova.scheduler.client.report [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Refreshing inventories for resource provider f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 960.768237] env[61852]: DEBUG nova.scheduler.client.report [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Updating ProviderTree inventory for provider f818062c-7b17-4bd0-94af-192a674543c3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 960.768237] env[61852]: DEBUG nova.compute.provider_tree [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 960.776719] env[61852]: DEBUG nova.scheduler.client.report [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Refreshing aggregate associations for resource provider f818062c-7b17-4bd0-94af-192a674543c3, aggregates: None {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 960.804769] env[61852]: DEBUG nova.scheduler.client.report [None 
req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Refreshing trait associations for resource provider f818062c-7b17-4bd0-94af-192a674543c3, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 960.912773] env[61852]: DEBUG oslo_vmware.api [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293233, 'name': ReconfigVM_Task, 'duration_secs': 0.660429} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.913429] env[61852]: DEBUG oslo_concurrency.lockutils [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Releasing lock "4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 960.913681] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Reconfigured VM to attach interface {{(pid=61852) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 960.918885] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4afa7a9d-3ba0-43cf-8b82-d3f45983fa4c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.926969] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a57f509-857e-47f2-bb9b-92f8f8d023e4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.979924] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e514c9a1-7d50-421f-9947-7d0e08749406 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.992172] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b88320c4-c03d-4f96-91ea-c3010a1969e4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.014708] env[61852]: DEBUG nova.compute.provider_tree [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 961.098155] env[61852]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 961.098155] env[61852]: value = 
"session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d1431f-ba0a-993a-859a-ce4f667f8908" [ 961.098155] env[61852]: _type = "HttpNfcLease" [ 961.098155] env[61852]: } is initializing. {{(pid=61852) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 961.188578] env[61852]: DEBUG oslo_concurrency.lockutils [req-919f775e-432e-45f9-89b0-b04f482118a7 req-06e84cd4-4f6c-4f95-b5e1-f97404f83f89 service nova] Releasing lock "refresh_cache-4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 961.188870] env[61852]: DEBUG nova.compute.manager [req-919f775e-432e-45f9-89b0-b04f482118a7 req-06e84cd4-4f6c-4f95-b5e1-f97404f83f89 service nova] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Received event network-vif-deleted-c7387a83-80b1-43cf-8e49-88ed66f63c70 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 961.424692] env[61852]: DEBUG oslo_concurrency.lockutils [None req-027e5bce-fa16-4f71-840e-7ce80f40a50d tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "interface-4623565b-cd36-498c-a0e9-c3b1c6ef479b-825f3034-375c-417d-9d76-971f3239ff59" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.489s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.539977] env[61852]: ERROR nova.scheduler.client.report [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [req-8fb22631-06be-45cb-a963-c4279873aeb1] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f818062c-7b17-4bd0-94af-192a674543c3. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8fb22631-06be-45cb-a963-c4279873aeb1"}]} [ 961.555657] env[61852]: DEBUG nova.scheduler.client.report [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Refreshing inventories for resource provider f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 961.570246] env[61852]: DEBUG nova.scheduler.client.report [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Updating ProviderTree inventory for provider f818062c-7b17-4bd0-94af-192a674543c3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 961.570556] env[61852]: DEBUG nova.compute.provider_tree [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 961.582739] env[61852]: DEBUG nova.scheduler.client.report [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Refreshing aggregate associations for resource provider f818062c-7b17-4bd0-94af-192a674543c3, aggregates: None {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 961.599168] env[61852]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 961.599168] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d1431f-ba0a-993a-859a-ce4f667f8908" [ 961.599168] env[61852]: _type = "HttpNfcLease" [ 961.599168] env[61852]: } is initializing. 
{{(pid=61852) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 961.606584] env[61852]: DEBUG nova.scheduler.client.report [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Refreshing trait associations for resource provider f818062c-7b17-4bd0-94af-192a674543c3, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 961.732242] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13df5685-9b6c-496b-a92c-0a49dd760e72 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.740025] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cced615-d0a5-4586-acef-36adc1c889bf {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.775365] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a74fb8da-eefa-418a-8b1d-56bbacda650f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.778136] env[61852]: DEBUG nova.compute.manager [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Stashing vm_state: active {{(pid=61852) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 961.786354] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4de45b46-282e-43b3-9710-49a0d118d514 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.813760] env[61852]: DEBUG nova.compute.provider_tree [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 962.099504] env[61852]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 962.099504] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d1431f-ba0a-993a-859a-ce4f667f8908" [ 962.099504] env[61852]: _type = "HttpNfcLease" [ 962.099504] env[61852]: } is initializing. 
{{(pid=61852) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 962.305900] env[61852]: DEBUG oslo_concurrency.lockutils [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.351941] env[61852]: DEBUG nova.scheduler.client.report [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Updated inventory for provider f818062c-7b17-4bd0-94af-192a674543c3 with generation 118 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 962.352369] env[61852]: DEBUG nova.compute.provider_tree [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Updating resource provider f818062c-7b17-4bd0-94af-192a674543c3 generation from 118 to 119 during operation: update_inventory {{(pid=61852) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 962.352631] env[61852]: DEBUG nova.compute.provider_tree [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 962.601092] env[61852]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 962.601092] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d1431f-ba0a-993a-859a-ce4f667f8908" [ 962.601092] env[61852]: _type = "HttpNfcLease" [ 962.601092] env[61852]: } is initializing. 
{{(pid=61852) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 962.723142] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b9a45a74-09b1-40c7-9d6e-920ee5e3e6c6 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "interface-4623565b-cd36-498c-a0e9-c3b1c6ef479b-825f3034-375c-417d-9d76-971f3239ff59" by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.723434] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b9a45a74-09b1-40c7-9d6e-920ee5e3e6c6 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "interface-4623565b-cd36-498c-a0e9-c3b1c6ef479b-825f3034-375c-417d-9d76-971f3239ff59" acquired by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.858066] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 4.616s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.858209] env[61852]: INFO nova.compute.manager [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Migrating [ 962.864867] env[61852]: DEBUG oslo_concurrency.lockutils [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.764s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.865119] env[61852]: DEBUG nova.objects.instance [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lazy-loading 'resources' on Instance uuid cc5e0467-2960-43a1-bd7b-a528d5788028 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 963.101139] env[61852]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 963.101139] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d1431f-ba0a-993a-859a-ce4f667f8908" [ 963.101139] env[61852]: _type = "HttpNfcLease" [ 963.101139] env[61852]: } is ready. {{(pid=61852) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 963.101493] env[61852]: DEBUG oslo_vmware.rw_handles [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 963.101493] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d1431f-ba0a-993a-859a-ce4f667f8908" [ 963.101493] env[61852]: _type = "HttpNfcLease" [ 963.101493] env[61852]: }. 
{{(pid=61852) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 963.102197] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f0e11dc-6b19-4d37-ad40-2c694eb2d230 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.110404] env[61852]: DEBUG oslo_vmware.rw_handles [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523d1ed2-95ce-c4c4-d9e1-a8a70ddcb499/disk-0.vmdk from lease info. {{(pid=61852) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 963.110596] env[61852]: DEBUG oslo_vmware.rw_handles [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Creating HTTP connection to write to file with size = 31666688 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523d1ed2-95ce-c4c4-d9e1-a8a70ddcb499/disk-0.vmdk. {{(pid=61852) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 963.174742] env[61852]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-cf3a29b5-0bb0-4df2-b553-4204562eb61c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.226340] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b9a45a74-09b1-40c7-9d6e-920ee5e3e6c6 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 963.226585] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b9a45a74-09b1-40c7-9d6e-920ee5e3e6c6 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquired lock "4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.227516] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f9b8a87-8b51-43ba-ac30-ac8e078fc099 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.246642] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4c47d1f-e36d-4edb-a0be-0e1d4c3eab68 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.273318] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-b9a45a74-09b1-40c7-9d6e-920ee5e3e6c6 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Reconfiguring VM to detach interface {{(pid=61852) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 963.273654] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e138277-9199-4a62-b23f-d3594c980ac3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.292199] env[61852]: DEBUG 
oslo_vmware.api [None req-b9a45a74-09b1-40c7-9d6e-920ee5e3e6c6 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 963.292199] env[61852]: value = "task-1293234" [ 963.292199] env[61852]: _type = "Task" [ 963.292199] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.300240] env[61852]: DEBUG oslo_vmware.api [None req-b9a45a74-09b1-40c7-9d6e-920ee5e3e6c6 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293234, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.379025] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "refresh_cache-fb75509e-3cbf-406e-ad2d-aeb51a68295d" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 963.379025] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquired lock "refresh_cache-fb75509e-3cbf-406e-ad2d-aeb51a68295d" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.379025] env[61852]: DEBUG nova.network.neutron [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 963.425059] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Acquiring lock "d9715a56-249f-4c19-a55b-730d352248cb" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 963.425320] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Lock "d9715a56-249f-4c19-a55b-730d352248cb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.534010] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeb3f585-dfcf-48f5-b087-2baff4b57a5f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.546333] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3da5eb93-6b01-4301-86ad-e94f67f10411 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.580487] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-fe9f9547-877b-4dd7-8aff-979e39b852b0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.589799] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d04d9f4-2d3e-4b82-a859-07f796ecc18f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.604844] env[61852]: DEBUG nova.compute.provider_tree [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 963.804422] env[61852]: DEBUG oslo_vmware.api [None req-b9a45a74-09b1-40c7-9d6e-920ee5e3e6c6 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293234, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.927964] env[61852]: DEBUG nova.compute.manager [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 964.110225] env[61852]: DEBUG nova.scheduler.client.report [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 964.141094] env[61852]: DEBUG nova.network.neutron [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Updating instance_info_cache with network_info: [{"id": "41d20024-17d1-4e43-ad02-a6316dcc9c2f", "address": "fa:16:3e:96:00:1c", "network": {"id": "240e5d63-b796-4cef-9d1f-5d8f8868dea4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1472329620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdac3605118e44a69d44ab56cafe2e21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41d20024-17", "ovs_interfaceid": 
"41d20024-17d1-4e43-ad02-a6316dcc9c2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.310729] env[61852]: DEBUG oslo_vmware.api [None req-b9a45a74-09b1-40c7-9d6e-920ee5e3e6c6 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293234, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.419926] env[61852]: DEBUG oslo_vmware.rw_handles [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Completed reading data from the image iterator. {{(pid=61852) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 964.422023] env[61852]: DEBUG oslo_vmware.rw_handles [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523d1ed2-95ce-c4c4-d9e1-a8a70ddcb499/disk-0.vmdk. {{(pid=61852) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 964.422023] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ccd0f8c-230e-4485-8f87-525ac28b4b3b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.429258] env[61852]: DEBUG oslo_vmware.rw_handles [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523d1ed2-95ce-c4c4-d9e1-a8a70ddcb499/disk-0.vmdk is in state: ready. {{(pid=61852) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 964.429657] env[61852]: DEBUG oslo_vmware.rw_handles [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523d1ed2-95ce-c4c4-d9e1-a8a70ddcb499/disk-0.vmdk. 
{{(pid=61852) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 964.430135] env[61852]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-bbcc1d52-f124-4c90-acd2-9b5706cc4deb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.453557] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.614508] env[61852]: DEBUG oslo_concurrency.lockutils [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.749s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.618878] env[61852]: DEBUG oslo_concurrency.lockutils [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 2.313s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.635398] env[61852]: INFO nova.scheduler.client.report [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Deleted allocations for instance cc5e0467-2960-43a1-bd7b-a528d5788028 [ 964.644337] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Releasing lock "refresh_cache-fb75509e-3cbf-406e-ad2d-aeb51a68295d" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 964.672175] env[61852]: DEBUG oslo_vmware.rw_handles [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523d1ed2-95ce-c4c4-d9e1-a8a70ddcb499/disk-0.vmdk. 
{{(pid=61852) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 964.672431] env[61852]: INFO nova.virt.vmwareapi.images [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Downloaded image file data f5e5a587-44f8-4b6c-b924-cca27583fcf9 [ 964.673311] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a41f991-8ce2-4513-a0c9-5fe466234f2a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.690365] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d3e50bd3-2d90-4da6-b982-1bc00d5c8e0f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.803798] env[61852]: DEBUG oslo_vmware.api [None req-b9a45a74-09b1-40c7-9d6e-920ee5e3e6c6 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293234, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.816182] env[61852]: INFO nova.virt.vmwareapi.images [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] The imported VM was unregistered [ 964.818756] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Caching image {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 964.819027] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Creating directory with path [datastore1] devstack-image-cache_base/f5e5a587-44f8-4b6c-b924-cca27583fcf9 {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 964.819645] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-34b31601-58e7-4dda-a9f6-eac862744d13 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.830322] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Created directory with path [datastore1] devstack-image-cache_base/f5e5a587-44f8-4b6c-b924-cca27583fcf9 {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 964.830501] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_b783dd0c-23e2-49cb-8458-5d8e4bddc85d/OSTACK_IMG_b783dd0c-23e2-49cb-8458-5d8e4bddc85d.vmdk to [datastore1] devstack-image-cache_base/f5e5a587-44f8-4b6c-b924-cca27583fcf9/f5e5a587-44f8-4b6c-b924-cca27583fcf9.vmdk. 
{{(pid=61852) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 964.830740] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-db3c3775-4cef-467f-9c60-08f1c786cac1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.836579] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){ [ 964.836579] env[61852]: value = "task-1293236" [ 964.836579] env[61852]: _type = "Task" [ 964.836579] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.843610] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293236, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.124451] env[61852]: INFO nova.compute.claims [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 965.143265] env[61852]: DEBUG oslo_concurrency.lockutils [None req-779ec9df-a47e-442a-84b5-76cc0e3f6218 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "cc5e0467-2960-43a1-bd7b-a528d5788028" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.077s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.306365] env[61852]: DEBUG oslo_vmware.api [None req-b9a45a74-09b1-40c7-9d6e-920ee5e3e6c6 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293234, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.345942] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293236, 'name': MoveVirtualDisk_Task} progress is 24%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.631051] env[61852]: INFO nova.compute.resource_tracker [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Updating resource usage from migration ccb20a30-bdc7-49d6-960d-caf0ea2fe3b6 [ 965.769372] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba437b7-f2e4-47f6-98cf-83af42c9458a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.777132] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-082d4524-d225-4a07-932a-119b4b930f0b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.812588] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c270108c-4612-474b-ab41-1b98eef5c376 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.820414] env[61852]: DEBUG oslo_vmware.api [None req-b9a45a74-09b1-40c7-9d6e-920ee5e3e6c6 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293234, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.823718] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eabdc49-824a-474d-8a96-f0cd4aa165b3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.839843] env[61852]: DEBUG nova.compute.provider_tree [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 965.851576] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293236, 'name': MoveVirtualDisk_Task} progress is 43%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.160711] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e5c62f5-4874-4435-99a9-8a098841c02b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.181863] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Updating instance 'fb75509e-3cbf-406e-ad2d-aeb51a68295d' progress to 0 {{(pid=61852) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 966.320299] env[61852]: DEBUG oslo_vmware.api [None req-b9a45a74-09b1-40c7-9d6e-920ee5e3e6c6 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293234, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.345478] env[61852]: DEBUG nova.scheduler.client.report [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 966.352308] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293236, 'name': MoveVirtualDisk_Task} progress is 66%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.439581] env[61852]: DEBUG oslo_concurrency.lockutils [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "9f39bee8-52b8-426d-9b8a-114e3a6a6343" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 966.439836] env[61852]: DEBUG oslo_concurrency.lockutils [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "9f39bee8-52b8-426d-9b8a-114e3a6a6343" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.688983] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 966.688983] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-737cfc27-b901-4686-a577-9a7907001456 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.699486] env[61852]: DEBUG oslo_vmware.api [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 966.699486] env[61852]: value = "task-1293237" [ 966.699486] env[61852]: _type = "Task" [ 966.699486] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.711280] env[61852]: DEBUG oslo_vmware.api [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293237, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.820818] env[61852]: DEBUG oslo_vmware.api [None req-b9a45a74-09b1-40c7-9d6e-920ee5e3e6c6 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293234, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.851967] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293236, 'name': MoveVirtualDisk_Task} progress is 85%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.853941] env[61852]: DEBUG oslo_concurrency.lockutils [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.235s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.854225] env[61852]: INFO nova.compute.manager [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Migrating [ 966.861265] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.408s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.862783] env[61852]: INFO nova.compute.claims [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 966.941838] env[61852]: DEBUG nova.compute.manager [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 967.209230] env[61852]: DEBUG oslo_vmware.api [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293237, 'name': PowerOffVM_Task, 'duration_secs': 0.214685} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.209539] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 967.209731] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Updating instance 'fb75509e-3cbf-406e-ad2d-aeb51a68295d' progress to 17 {{(pid=61852) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 967.282355] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 967.282906] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 967.318204] env[61852]: DEBUG oslo_vmware.api [None req-b9a45a74-09b1-40c7-9d6e-920ee5e3e6c6 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293234, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.348586] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293236, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.313978} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.348891] env[61852]: INFO nova.virt.vmwareapi.ds_util [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_b783dd0c-23e2-49cb-8458-5d8e4bddc85d/OSTACK_IMG_b783dd0c-23e2-49cb-8458-5d8e4bddc85d.vmdk to [datastore1] devstack-image-cache_base/f5e5a587-44f8-4b6c-b924-cca27583fcf9/f5e5a587-44f8-4b6c-b924-cca27583fcf9.vmdk. 
[ 967.349087] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Cleaning up location [datastore1] OSTACK_IMG_b783dd0c-23e2-49cb-8458-5d8e4bddc85d {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 967.349286] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_b783dd0c-23e2-49cb-8458-5d8e4bddc85d {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 967.349543] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3a6f3d2d-c951-443c-b337-85f23ce8b8d2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.356192] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){ [ 967.356192] env[61852]: value = "task-1293238" [ 967.356192] env[61852]: _type = "Task" [ 967.356192] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.363611] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293238, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.375462] env[61852]: DEBUG oslo_concurrency.lockutils [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "refresh_cache-df332116-2ae3-4e51-99b0-108921470959" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 967.375626] env[61852]: DEBUG oslo_concurrency.lockutils [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired lock "refresh_cache-df332116-2ae3-4e51-99b0-108921470959" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 967.375799] env[61852]: DEBUG nova.network.neutron [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 967.464217] env[61852]: DEBUG oslo_concurrency.lockutils [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 967.717019] env[61852]: DEBUG nova.virt.hardware [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 967.717019] env[61852]: DEBUG nova.virt.hardware [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 967.717019] env[61852]: DEBUG nova.virt.hardware [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 967.717019] env[61852]: DEBUG nova.virt.hardware [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 967.717019] env[61852]: DEBUG nova.virt.hardware [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 
tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 967.717019] env[61852]: DEBUG nova.virt.hardware [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 967.717019] env[61852]: DEBUG nova.virt.hardware [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 967.717968] env[61852]: DEBUG nova.virt.hardware [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 967.718327] env[61852]: DEBUG nova.virt.hardware [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 967.718591] env[61852]: DEBUG nova.virt.hardware [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 967.718850] env[61852]: DEBUG nova.virt.hardware [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 967.725688] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ed4776c2-ba66-47ca-a859-4e5d10259b8c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.741994] env[61852]: DEBUG oslo_vmware.api [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 967.741994] env[61852]: value = "task-1293239" [ 967.741994] env[61852]: _type = "Task" [ 967.741994] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.750148] env[61852]: DEBUG oslo_vmware.api [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293239, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.788947] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 967.789291] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Starting heal instance info cache {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 967.818129] env[61852]: DEBUG oslo_vmware.api [None req-b9a45a74-09b1-40c7-9d6e-920ee5e3e6c6 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293234, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.864889] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293238, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.034395} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.865355] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 967.865524] env[61852]: DEBUG oslo_concurrency.lockutils [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f5e5a587-44f8-4b6c-b924-cca27583fcf9/f5e5a587-44f8-4b6c-b924-cca27583fcf9.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 967.865837] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f5e5a587-44f8-4b6c-b924-cca27583fcf9/f5e5a587-44f8-4b6c-b924-cca27583fcf9.vmdk to [datastore1] 8d8679db-eb9d-45c1-b053-70378f58e273/8d8679db-eb9d-45c1-b053-70378f58e273.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 967.866206] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b98dba33-2f2a-4198-ab32-7eb60706c6fc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.872550] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){ [ 967.872550] env[61852]: value = "task-1293240" [ 967.872550] env[61852]: _type = "Task" [ 967.872550] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.883704] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293240, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.015734] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a3b6956-b9f2-4c6b-8f05-992ddd515b71 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.023301] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-940bb5cb-aa10-4cd4-b6ff-53cc3c95c277 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.054857] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d6216b0-3cc9-4368-98cf-b026fa6dde7e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.064443] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2494188-5dc1-406b-8aca-802c1e7ec0dc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.085068] env[61852]: DEBUG nova.compute.provider_tree [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 968.095298] env[61852]: DEBUG nova.network.neutron [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Updating instance_info_cache with network_info: [{"id": "16ad03e7-b72d-4cdd-8da7-5314a7cad855", "address": "fa:16:3e:06:57:79", "network": {"id": "37c975fc-d484-4e07-82b4-dc10db3dab61", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2132613748-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14a017ea2b084ae0ad2994dda7809c7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16ad03e7-b7", "ovs_interfaceid": "16ad03e7-b72d-4cdd-8da7-5314a7cad855", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 968.253417] env[61852]: DEBUG 
oslo_vmware.api [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293239, 'name': ReconfigVM_Task, 'duration_secs': 0.405975} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.253763] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Updating instance 'fb75509e-3cbf-406e-ad2d-aeb51a68295d' progress to 33 {{(pid=61852) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 968.321674] env[61852]: DEBUG oslo_vmware.api [None req-b9a45a74-09b1-40c7-9d6e-920ee5e3e6c6 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293234, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.384603] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293240, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.588334] env[61852]: DEBUG nova.scheduler.client.report [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 968.598082] env[61852]: DEBUG oslo_concurrency.lockutils [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Releasing lock "refresh_cache-df332116-2ae3-4e51-99b0-108921470959" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 968.761117] env[61852]: DEBUG nova.virt.hardware [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:563}} [ 968.761117] env[61852]: DEBUG nova.virt.hardware [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 968.761117] env[61852]: DEBUG nova.virt.hardware [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 968.761117] env[61852]: DEBUG nova.virt.hardware [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 968.761851] env[61852]: DEBUG nova.virt.hardware [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 968.762326] env[61852]: DEBUG nova.virt.hardware [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 968.762846] env[61852]: DEBUG nova.virt.hardware [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 968.763304] env[61852]: DEBUG nova.virt.hardware [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 968.763656] env[61852]: DEBUG nova.virt.hardware [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 968.764034] env[61852]: DEBUG nova.virt.hardware [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 968.764376] env[61852]: DEBUG nova.virt.hardware [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 968.769881] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 
tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Reconfiguring VM instance instance-0000005b to detach disk 2000 {{(pid=61852) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 968.770169] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-379c82fe-fdee-4968-a994-f587bc4bf411 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.793221] env[61852]: DEBUG oslo_vmware.api [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 968.793221] env[61852]: value = "task-1293241" [ 968.793221] env[61852]: _type = "Task" [ 968.793221] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.805558] env[61852]: DEBUG oslo_vmware.api [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293241, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.823513] env[61852]: DEBUG oslo_vmware.api [None req-b9a45a74-09b1-40c7-9d6e-920ee5e3e6c6 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293234, 'name': ReconfigVM_Task} progress is 18%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.890770] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293240, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.094327] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.233s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.094928] env[61852]: DEBUG nova.compute.manager [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 969.097685] env[61852]: DEBUG oslo_concurrency.lockutils [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.634s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.099149] env[61852]: INFO nova.compute.claims [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 969.305164] env[61852]: DEBUG oslo_vmware.api [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293241, 'name': ReconfigVM_Task, 'duration_secs': 0.180041} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.305461] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Reconfigured VM instance instance-0000005b to detach disk 2000 {{(pid=61852) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 969.306548] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4975321-4153-4456-a324-5d3af726a413 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.334755] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] fb75509e-3cbf-406e-ad2d-aeb51a68295d/fb75509e-3cbf-406e-ad2d-aeb51a68295d.vmdk or device None with type thin {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 969.336121] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-41292c16-d9f8-493b-89d3-3f774e41c49a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.354034] env[61852]: DEBUG oslo_vmware.api [None req-b9a45a74-09b1-40c7-9d6e-920ee5e3e6c6 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293234, 'name': ReconfigVM_Task} progress is 18%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.360775] env[61852]: DEBUG oslo_vmware.api [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 969.360775] env[61852]: value = "task-1293242" [ 969.360775] env[61852]: _type = "Task" [ 969.360775] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.372294] env[61852]: DEBUG oslo_vmware.api [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293242, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.385094] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293240, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.603912] env[61852]: DEBUG nova.compute.utils [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 969.605557] env[61852]: DEBUG nova.compute.manager [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 969.605744] env[61852]: DEBUG nova.network.neutron [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 969.653048] env[61852]: DEBUG nova.policy [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '563d7d18230e4884837fa464e885354d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cad1538cc4c54d7582daffda576baacf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 969.825097] env[61852]: DEBUG oslo_vmware.api [None req-b9a45a74-09b1-40c7-9d6e-920ee5e3e6c6 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293234, 'name': ReconfigVM_Task, 'duration_secs': 6.229191} completed successfully. 
[ 969.825398] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b9a45a74-09b1-40c7-9d6e-920ee5e3e6c6 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Releasing lock "4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 969.826027] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-b9a45a74-09b1-40c7-9d6e-920ee5e3e6c6 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Reconfigured VM to detach interface {{(pid=61852) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 969.876120] env[61852]: DEBUG oslo_vmware.api [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293242, 'name': ReconfigVM_Task, 'duration_secs': 0.317627} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.879523] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Reconfigured VM instance instance-0000005b to attach disk [datastore1] fb75509e-3cbf-406e-ad2d-aeb51a68295d/fb75509e-3cbf-406e-ad2d-aeb51a68295d.vmdk or device None with type thin {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 969.879809] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Updating instance 'fb75509e-3cbf-406e-ad2d-aeb51a68295d' progress to 50 {{(pid=61852) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 969.891269] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293240, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.950277] env[61852]: DEBUG nova.network.neutron [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Successfully created port: a83d344f-834c-41d9-bb8f-27ebb900a57e {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 970.114051] env[61852]: DEBUG nova.compute.manager [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Start building block device mappings for instance.
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 970.127244] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05f3f827-2372-49b5-b888-18bbd2a7eb05 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.152604] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Updating instance 'df332116-2ae3-4e51-99b0-108921470959' progress to 0 {{(pid=61852) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 970.299076] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26cb57e5-2154-4430-8d9d-502fbec5744e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.305131] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88458602-170c-42ab-bce5-a1fba031314a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.339755] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6cc69dc-cbdf-4545-abea-6c1af509f62e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.347902] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-863b3c9f-0bf2-4504-ba17-8de83a5dc548 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.363227] env[61852]: DEBUG nova.compute.provider_tree [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 970.384515] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293240, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.450699} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.386463] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f5e5a587-44f8-4b6c-b924-cca27583fcf9/f5e5a587-44f8-4b6c-b924-cca27583fcf9.vmdk to [datastore1] 8d8679db-eb9d-45c1-b053-70378f58e273/8d8679db-eb9d-45c1-b053-70378f58e273.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 970.387403] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-767e2311-d8c6-48ab-821f-b27920921c95 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.390414] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-682adcc0-e724-4eb9-899e-aee70335ea7f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.417096] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f6d7ec8-f292-4cdc-abca-017c9b7ff862 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.428635] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] 8d8679db-eb9d-45c1-b053-70378f58e273/8d8679db-eb9d-45c1-b053-70378f58e273.vmdk or device None with type streamOptimized {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 970.428899] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be63f139-89a7-4b52-b2fb-8dd4555d2d73 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.459270] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Updating instance 'fb75509e-3cbf-406e-ad2d-aeb51a68295d' progress to 67 {{(pid=61852) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 970.466045] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){ [ 970.466045] env[61852]: value = "task-1293243" [ 970.466045] env[61852]: _type = "Task" [ 970.466045] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.471392] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293243, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.663214] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 970.663744] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-54ef40fc-4f0c-461a-94bd-62784170e06b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.671922] env[61852]: DEBUG oslo_vmware.api [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 970.671922] env[61852]: value = "task-1293244" [ 970.671922] env[61852]: _type = "Task" [ 970.671922] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.682082] env[61852]: DEBUG oslo_vmware.api [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293244, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.865983] env[61852]: DEBUG nova.scheduler.client.report [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 970.977062] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293243, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.006257] env[61852]: DEBUG nova.network.neutron [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Port 41d20024-17d1-4e43-ad02-a6316dcc9c2f binding to destination host cpu-1 is already ACTIVE {{(pid=61852) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 971.125072] env[61852]: DEBUG nova.compute.manager [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
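
The inventory blob reported above for provider f818062c-7b17-4bd0-94af-192a674543c3 is what Placement schedules against. Assuming the usual capacity formula, capacity = (total - reserved) * allocation_ratio, this node can place 192 vCPUs ((48 - 0) * 4.0), 196,078 MB of RAM ((196590 - 512) * 1.0) and 400 GB of disk:

```python
# Back-of-the-envelope check of the inventory reported above (assumes
# Placement's capacity formula: (total - reserved) * allocation_ratio).
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}
for rc, inv in inventory.items():
    cap = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(rc, cap)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```
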
[ 971.159797] env[61852]: DEBUG nova.virt.hardware [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 971.159956] env[61852]: DEBUG nova.virt.hardware [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 971.160738] env[61852]: DEBUG nova.virt.hardware [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 971.160738] env[61852]: DEBUG nova.virt.hardware [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 971.160738] env[61852]: DEBUG nova.virt.hardware [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 971.160738] env[61852]: DEBUG nova.virt.hardware [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 971.160972] env[61852]: DEBUG nova.virt.hardware [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 971.161130] env[61852]: DEBUG nova.virt.hardware [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 971.161700] env[61852]: DEBUG nova.virt.hardware [None
req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 971.161700] env[61852]: DEBUG nova.virt.hardware [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 971.161700] env[61852]: DEBUG nova.virt.hardware [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 971.163027] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50ac07e0-6895-46b1-a540-f7e77ec3c50d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.178274] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a74de7c-d95f-4623-9c11-d40699c830d8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.187304] env[61852]: DEBUG oslo_vmware.api [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293244, 'name': PowerOffVM_Task, 'duration_secs': 0.291041} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
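
The nova.virt.hardware lines above follow a fixed pipeline: take flavor and image topology limits (all unset here, hence the 65536 defaults), enumerate every sockets x cores x threads factorization of the vCPU count, then sort by preference. For a 1-vCPU flavor the only factorization is 1:1:1, exactly the single VirtCPUTopology the log prints. A simplified sketch of the enumeration step (not nova's actual code):

```python
# Simplified sketch of the "Build topologies ... Got 1 possible topologies"
# step: enumerate factorizations of the vCPU count within the limits.
from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    bound = lambda limit: range(1, min(vcpus, limit) + 1)
    for sockets, cores, threads in product(
            bound(max_sockets), bound(max_cores), bound(max_threads)):
        if sockets * cores * threads == vcpus:
            yield (sockets, cores, threads)

print(list(possible_topologies(1)))   # [(1, 1, 1)] -- matches the log
```
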
[ 971.195260] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 971.195507] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Updating instance 'df332116-2ae3-4e51-99b0-108921470959' progress to 17 {{(pid=61852) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 971.253624] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b9a45a74-09b1-40c7-9d6e-920ee5e3e6c6 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "refresh_cache-4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 971.253827] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b9a45a74-09b1-40c7-9d6e-920ee5e3e6c6 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquired lock "refresh_cache-4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.254031] env[61852]: DEBUG nova.network.neutron [None req-b9a45a74-09b1-40c7-9d6e-920ee5e3e6c6 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 971.370536] env[61852]: DEBUG oslo_concurrency.lockutils [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.273s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.371084] env[61852]: DEBUG nova.compute.manager [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 971.477897] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293245, 'name': Rename_Task, 'duration_secs': 0.165773} completed successfully.
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.478215] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Reconfigured VM instance instance-00000049 to attach disk [datastore1] 8d8679db-eb9d-45c1-b053-70378f58e273/8d8679db-eb9d-45c1-b053-70378f58e273.vmdk or device None with type streamOptimized {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 971.478816] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2b0eeb4b-fa0f-4ae3-9a11-a0b15c372756 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.484767] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){ [ 971.484767] env[61852]: value = "task-1293245" [ 971.484767] env[61852]: _type = "Task" [ 971.484767] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.492080] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293245, 'name': Rename_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.636752] env[61852]: DEBUG nova.compute.manager [req-63ce8e04-227b-45ea-bbd3-13a9d18748a9 req-ca232a29-a944-4d8c-87ee-c55612908c14 service nova] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Received event network-vif-plugged-a83d344f-834c-41d9-bb8f-27ebb900a57e {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 971.637238] env[61852]: DEBUG oslo_concurrency.lockutils [req-63ce8e04-227b-45ea-bbd3-13a9d18748a9 req-ca232a29-a944-4d8c-87ee-c55612908c14 service nova] Acquiring lock "d9715a56-249f-4c19-a55b-730d352248cb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.637582] env[61852]: DEBUG oslo_concurrency.lockutils [req-63ce8e04-227b-45ea-bbd3-13a9d18748a9 req-ca232a29-a944-4d8c-87ee-c55612908c14 service nova] Lock "d9715a56-249f-4c19-a55b-730d352248cb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.638277] env[61852]: DEBUG oslo_concurrency.lockutils [req-63ce8e04-227b-45ea-bbd3-13a9d18748a9 req-ca232a29-a944-4d8c-87ee-c55612908c14 service nova] Lock "d9715a56-249f-4c19-a55b-730d352248cb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.638554] env[61852]: DEBUG nova.compute.manager [req-63ce8e04-227b-45ea-bbd3-13a9d18748a9 req-ca232a29-a944-4d8c-87ee-c55612908c14 service nova] [instance: d9715a56-249f-4c19-a55b-730d352248cb] No waiting events found dispatching 
network-vif-plugged-a83d344f-834c-41d9-bb8f-27ebb900a57e {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 971.638747] env[61852]: WARNING nova.compute.manager [req-63ce8e04-227b-45ea-bbd3-13a9d18748a9 req-ca232a29-a944-4d8c-87ee-c55612908c14 service nova] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Received unexpected event network-vif-plugged-a83d344f-834c-41d9-bb8f-27ebb900a57e for instance with vm_state building and task_state spawning. [ 971.702492] env[61852]: DEBUG nova.virt.hardware [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 971.702711] env[61852]: DEBUG nova.virt.hardware [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 971.702873] env[61852]: DEBUG nova.virt.hardware [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 971.703067] env[61852]: DEBUG nova.virt.hardware [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 971.703220] env[61852]: DEBUG nova.virt.hardware [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 971.703368] env[61852]: DEBUG nova.virt.hardware [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 971.703568] env[61852]: DEBUG nova.virt.hardware [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 971.703728] env[61852]: DEBUG nova.virt.hardware [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
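
The network-vif-plugged sequence earlier in this stretch (req-63ce8e04...) shows the external-event handshake: Neutron reports the port is wired up, Nova takes the per-instance "<uuid>-events" lock, and because no waiter had registered yet it logs the WARNING about an unexpected event, which is harmless while the instance is still in vm_state building. A toy version of that dispatch (the real logic lives in nova.compute.manager.InstanceEvents; everything below is a simplified stand-in):

```python
# Toy sketch of the "<uuid>-events" lock plus pop_instance_event pattern.
import threading
from collections import defaultdict

_waiters = defaultdict(dict)   # instance_uuid -> {event_name: Event}
_lock = threading.Lock()       # stands in for the "<uuid>-events" lock

def prepare_for_event(instance_uuid, event_name):
    ev = threading.Event()
    with _lock:
        _waiters[instance_uuid][event_name] = ev
    return ev                   # caller blocks on ev.wait() while plugging

def pop_instance_event(instance_uuid, event_name):
    with _lock:
        ev = _waiters[instance_uuid].pop(event_name, None)
    if ev is None:              # nobody was waiting for it yet
        print("WARNING: received unexpected event %s" % event_name)
    else:
        ev.set()                # wake the waiter
```
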
[ 971.703891] env[61852]: DEBUG nova.virt.hardware [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 971.704284] env[61852]: DEBUG nova.virt.hardware [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 971.704492] env[61852]: DEBUG nova.virt.hardware [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 971.711041] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-adce1d8e-1d96-478d-885e-4a1b71c698bc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.735162] env[61852]: DEBUG oslo_vmware.api [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 971.735162] env[61852]: value = "task-1293246" [ 971.735162] env[61852]: _type = "Task" [ 971.735162] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.744443] env[61852]: DEBUG oslo_vmware.api [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293246, 'name': ReconfigVM_Task} progress is 6%.
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.816916] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "refresh_cache-8d8679db-eb9d-45c1-b053-70378f58e273" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 971.817147] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquired lock "refresh_cache-8d8679db-eb9d-45c1-b053-70378f58e273" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.820684] env[61852]: DEBUG nova.network.neutron [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Forcefully refreshing network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 971.824141] env[61852]: DEBUG nova.network.neutron [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Successfully updated port: a83d344f-834c-41d9-bb8f-27ebb900a57e {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 971.875729] env[61852]: DEBUG nova.compute.utils [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 971.879715] env[61852]: DEBUG nova.compute.manager [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 971.882034] env[61852]: DEBUG nova.network.neutron [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 971.925139] env[61852]: DEBUG nova.policy [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd1349b8262e345068742af657fa8cbd0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4dbb543c66364861bf5f437c8c33a550', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 971.996811] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293245, 'name': Rename_Task, 'duration_secs': 0.165773} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.997239] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 971.997411] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-db565257-fd2c-4f8f-9959-b532797b7b7a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.004242] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){ [ 972.004242] env[61852]: value = "task-1293247" [ 972.004242] env[61852]: _type = "Task" [ 972.004242] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.029940] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293247, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.038719] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "fb75509e-3cbf-406e-ad2d-aeb51a68295d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.038961] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "fb75509e-3cbf-406e-ad2d-aeb51a68295d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.039160] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "fb75509e-3cbf-406e-ad2d-aeb51a68295d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.202456] env[61852]: DEBUG nova.network.neutron [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Successfully created port: 580ae921-fe6d-4dfb-8b65-4dd88fec14ed {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 972.247034] env[61852]: DEBUG oslo_vmware.api [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293246, 'name': 
ReconfigVM_Task, 'duration_secs': 0.300279} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.247536] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Updating instance 'df332116-2ae3-4e51-99b0-108921470959' progress to 33 {{(pid=61852) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 972.277427] env[61852]: INFO nova.network.neutron [None req-b9a45a74-09b1-40c7-9d6e-920ee5e3e6c6 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Port 825f3034-375c-417d-9d76-971f3239ff59 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 972.277427] env[61852]: DEBUG nova.network.neutron [None req-b9a45a74-09b1-40c7-9d6e-920ee5e3e6c6 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Updating instance_info_cache with network_info: [{"id": "6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883", "address": "fa:16:3e:b2:2d:44", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ab757ae-eb", "ovs_interfaceid": "6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.332048] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Acquiring lock "refresh_cache-d9715a56-249f-4c19-a55b-730d352248cb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.332048] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Acquired lock "refresh_cache-d9715a56-249f-4c19-a55b-730d352248cb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.332048] env[61852]: DEBUG nova.network.neutron [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: 
d9715a56-249f-4c19-a55b-730d352248cb] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 972.388021] env[61852]: DEBUG nova.compute.manager [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 972.513774] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293247, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.725408] env[61852]: DEBUG oslo_concurrency.lockutils [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "interface-d58958f2-7b6f-4480-9710-aa9e67ebd37c-825f3034-375c-417d-9d76-971f3239ff59" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.725719] env[61852]: DEBUG oslo_concurrency.lockutils [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "interface-d58958f2-7b6f-4480-9710-aa9e67ebd37c-825f3034-375c-417d-9d76-971f3239ff59" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.726123] env[61852]: DEBUG nova.objects.instance [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lazy-loading 'flavor' on Instance uuid d58958f2-7b6f-4480-9710-aa9e67ebd37c {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 972.753901] env[61852]: DEBUG nova.virt.hardware [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 972.754199] env[61852]: DEBUG nova.virt.hardware [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 972.754371] env[61852]: DEBUG nova.virt.hardware [None 
req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 972.754677] env[61852]: DEBUG nova.virt.hardware [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 972.754722] env[61852]: DEBUG nova.virt.hardware [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 972.754950] env[61852]: DEBUG nova.virt.hardware [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 972.755060] env[61852]: DEBUG nova.virt.hardware [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 972.755226] env[61852]: DEBUG nova.virt.hardware [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 972.755403] env[61852]: DEBUG nova.virt.hardware [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 972.755571] env[61852]: DEBUG nova.virt.hardware [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 972.755748] env[61852]: DEBUG nova.virt.hardware [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 972.761177] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Reconfiguring VM instance instance-0000005c to detach disk 2000 {{(pid=61852) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 972.761700] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-9ab5b459-100f-460a-9b79-17da31c5e2b6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.778647] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b9a45a74-09b1-40c7-9d6e-920ee5e3e6c6 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Releasing lock "refresh_cache-4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 972.783389] env[61852]: DEBUG oslo_vmware.api [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 972.783389] env[61852]: value = "task-1293248" [ 972.783389] env[61852]: _type = "Task" [ 972.783389] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.790555] env[61852]: DEBUG oslo_vmware.api [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293248, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.842793] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Acquiring lock "b99bacc1-21e7-4bbd-8092-549246500421" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.843058] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Lock "b99bacc1-21e7-4bbd-8092-549246500421" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.843299] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Acquiring lock "b99bacc1-21e7-4bbd-8092-549246500421-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.843491] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Lock "b99bacc1-21e7-4bbd-8092-549246500421-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.843675] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Lock "b99bacc1-21e7-4bbd-8092-549246500421-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.846253] env[61852]: INFO nova.compute.manager [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Terminating instance [ 972.849130] env[61852]: DEBUG nova.compute.manager [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 972.849332] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 972.850747] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-354e9b8a-e750-482d-b75d-566d6cc21beb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.860247] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 972.860545] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9556d774-eab8-4e8e-a066-8d9a3d68da29 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.866693] env[61852]: DEBUG oslo_vmware.api [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Waiting for the task: (returnval){ [ 972.866693] env[61852]: value = "task-1293249" [ 972.866693] env[61852]: _type = "Task" [ 972.866693] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.877798] env[61852]: DEBUG oslo_vmware.api [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Task: {'id': task-1293249, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.878779] env[61852]: DEBUG nova.network.neutron [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 973.014495] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293247, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.089919] env[61852]: DEBUG nova.network.neutron [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Updating instance_info_cache with network_info: [{"id": "a83d344f-834c-41d9-bb8f-27ebb900a57e", "address": "fa:16:3e:46:47:86", "network": {"id": "9c679167-5a23-4eb7-a416-0af1b125c656", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1380673912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cad1538cc4c54d7582daffda576baacf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "744515ee-aa5b-4c23-b959-b56c51da6b86", "external-id": "nsx-vlan-transportzone-310", "segmentation_id": 310, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa83d344f-83", "ovs_interfaceid": "a83d344f-834c-41d9-bb8f-27ebb900a57e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.111366] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "refresh_cache-fb75509e-3cbf-406e-ad2d-aeb51a68295d" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 973.111558] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquired lock "refresh_cache-fb75509e-3cbf-406e-ad2d-aeb51a68295d" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.111736] env[61852]: DEBUG nova.network.neutron [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 973.167958] env[61852]: DEBUG nova.network.neutron [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Updating instance_info_cache with network_info: [{"id": "83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b", "address": "fa:16:3e:82:81:71", "network": {"id": "d8dfb48f-1d4c-40ca-a2c0-27b808516657", "bridge": "br-int", "label": 
"tempest-ServersNegativeTestJSON-603860889-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fdd2d4aeb954b6fae049090b32f657b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d48f0ef6-34e5-44d4-8baf-4470ed96ce73", "external-id": "nsx-vlan-transportzone-316", "segmentation_id": 316, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83bdd4e5-89", "ovs_interfaceid": "83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.283907] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b9a45a74-09b1-40c7-9d6e-920ee5e3e6c6 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "interface-4623565b-cd36-498c-a0e9-c3b1c6ef479b-825f3034-375c-417d-9d76-971f3239ff59" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.560s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.296997] env[61852]: DEBUG oslo_vmware.api [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293248, 'name': ReconfigVM_Task, 'duration_secs': 0.353112} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.297133] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Reconfigured VM instance instance-0000005c to detach disk 2000 {{(pid=61852) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 973.297949] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25a8bd99-0b3c-4fc7-ac86-67c72f6d506c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.321714] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] df332116-2ae3-4e51-99b0-108921470959/df332116-2ae3-4e51-99b0-108921470959.vmdk or device None with type thin {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 973.322310] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9f78a0e2-ebca-49d3-9fba-fe3ac68ac00b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.335525] env[61852]: DEBUG nova.objects.instance [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lazy-loading 'pci_requests' on Instance uuid d58958f2-7b6f-4480-9710-aa9e67ebd37c {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 973.342128] env[61852]: DEBUG oslo_vmware.api [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 973.342128] env[61852]: value = "task-1293250" [ 973.342128] env[61852]: _type = "Task" [ 973.342128] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.351011] env[61852]: DEBUG oslo_vmware.api [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293250, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.376752] env[61852]: DEBUG oslo_vmware.api [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Task: {'id': task-1293249, 'name': PowerOffVM_Task, 'duration_secs': 0.206622} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.377091] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 973.377304] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 973.377578] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e26d12b-e4cf-4034-a2fa-5a34652d9836 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.394092] env[61852]: DEBUG nova.compute.manager [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 973.422257] env[61852]: DEBUG nova.virt.hardware [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 973.422705] env[61852]: DEBUG nova.virt.hardware [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 973.423720] env[61852]: DEBUG nova.virt.hardware [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 973.423985] env[61852]: DEBUG nova.virt.hardware [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 973.424211] env[61852]: DEBUG nova.virt.hardware [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 
tempest-ServersTestJSON-848945246-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 973.424404] env[61852]: DEBUG nova.virt.hardware [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 973.424635] env[61852]: DEBUG nova.virt.hardware [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 973.424818] env[61852]: DEBUG nova.virt.hardware [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 973.425021] env[61852]: DEBUG nova.virt.hardware [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 973.425201] env[61852]: DEBUG nova.virt.hardware [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 973.425389] env[61852]: DEBUG nova.virt.hardware [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 973.426479] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb3fa84-4522-4829-882a-1ece1bcc766f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.434267] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f714ffd7-f0aa-432d-998e-1e815174b569 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.441729] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 973.441958] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Deleting contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 973.442174] env[61852]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Deleting the datastore file [datastore2] b99bacc1-21e7-4bbd-8092-549246500421 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 973.449652] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-53034fc7-7a5a-4b3f-ad1e-6a96bac94950 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.455865] env[61852]: DEBUG oslo_vmware.api [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Waiting for the task: (returnval){ [ 973.455865] env[61852]: value = "task-1293252" [ 973.455865] env[61852]: _type = "Task" [ 973.455865] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.463293] env[61852]: DEBUG oslo_vmware.api [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Task: {'id': task-1293252, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.515476] env[61852]: DEBUG oslo_vmware.api [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293247, 'name': PowerOnVM_Task, 'duration_secs': 1.394533} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.515833] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 973.593044] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Releasing lock "refresh_cache-d9715a56-249f-4c19-a55b-730d352248cb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.593388] env[61852]: DEBUG nova.compute.manager [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Instance network_info: |[{"id": "a83d344f-834c-41d9-bb8f-27ebb900a57e", "address": "fa:16:3e:46:47:86", "network": {"id": "9c679167-5a23-4eb7-a416-0af1b125c656", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1380673912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cad1538cc4c54d7582daffda576baacf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, 
"type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "744515ee-aa5b-4c23-b959-b56c51da6b86", "external-id": "nsx-vlan-transportzone-310", "segmentation_id": 310, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa83d344f-83", "ovs_interfaceid": "a83d344f-834c-41d9-bb8f-27ebb900a57e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 973.593837] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:46:47:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '744515ee-aa5b-4c23-b959-b56c51da6b86', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a83d344f-834c-41d9-bb8f-27ebb900a57e', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 973.601379] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Creating folder: Project (cad1538cc4c54d7582daffda576baacf). Parent ref: group-v277280. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 973.601685] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-19be38ec-794d-418f-a915-b913185e4941 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.610479] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Created folder: Project (cad1538cc4c54d7582daffda576baacf) in parent group-v277280. [ 973.610686] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Creating folder: Instances. Parent ref: group-v277403. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 973.610937] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-69062392-964c-42f7-931b-bf775a0d2b41 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.620227] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Created folder: Instances in parent group-v277403. [ 973.620489] env[61852]: DEBUG oslo.service.loopingcall [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 973.620717] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 973.620978] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ea117ead-af3a-4496-b868-2992a0f1ed0a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.636638] env[61852]: DEBUG nova.compute.manager [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 973.637450] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3713a096-5f9f-4ef3-b897-25c91d13fd16 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.646081] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 973.646081] env[61852]: value = "task-1293255" [ 973.646081] env[61852]: _type = "Task" [ 973.646081] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.657157] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293255, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.671078] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Releasing lock "refresh_cache-8d8679db-eb9d-45c1-b053-70378f58e273" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.671311] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Updated the network info_cache for instance {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 973.671484] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 973.671740] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 973.671933] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 973.672211] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 973.672426] env[61852]: DEBUG 
oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 973.672638] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 973.672772] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61852) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 973.672917] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 973.684722] env[61852]: DEBUG nova.compute.manager [req-73a08a4a-5543-468e-b132-21c8c0f63854 req-111744c2-e28e-4c05-810f-8d3be9eb7650 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Received event network-changed-6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 973.684972] env[61852]: DEBUG nova.compute.manager [req-73a08a4a-5543-468e-b132-21c8c0f63854 req-111744c2-e28e-4c05-810f-8d3be9eb7650 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Refreshing instance network info cache due to event network-changed-6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 973.685230] env[61852]: DEBUG oslo_concurrency.lockutils [req-73a08a4a-5543-468e-b132-21c8c0f63854 req-111744c2-e28e-4c05-810f-8d3be9eb7650 service nova] Acquiring lock "refresh_cache-4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 973.685382] env[61852]: DEBUG oslo_concurrency.lockutils [req-73a08a4a-5543-468e-b132-21c8c0f63854 req-111744c2-e28e-4c05-810f-8d3be9eb7650 service nova] Acquired lock "refresh_cache-4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.685548] env[61852]: DEBUG nova.network.neutron [req-73a08a4a-5543-468e-b132-21c8c0f63854 req-111744c2-e28e-4c05-810f-8d3be9eb7650 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Refreshing network info cache for port 6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 973.742663] env[61852]: DEBUG nova.network.neutron [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Successfully updated port: 580ae921-fe6d-4dfb-8b65-4dd88fec14ed {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 973.838771] env[61852]: DEBUG nova.objects.base [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=61852) wrapper /opt/stack/nova/nova/objects/base.py:126}} 
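
A note on the "Running periodic task ComputeManager.*" entries above: they are emitted by oslo.service's periodic task machinery, which the compute manager class inherits. What follows is a minimal illustrative sketch, not Nova's actual code; the class name, spacings, and option registration are invented for the example:

    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF
    # Invented option, mirroring the "CONF.reclaim_instance_interval <= 0,
    # skipping..." guard seen in the log.
    CONF.register_opts([cfg.IntOpt('reclaim_instance_interval', default=0)])

    class ExampleManager(periodic_task.PeriodicTasks):
        """Toy stand-in for nova.compute.manager.ComputeManager."""

        @periodic_task.periodic_task(spacing=60)
        def _poll_rebooting_instances(self, context):
            pass  # the real task delegates to the virt driver

        @periodic_task.periodic_task(spacing=60)
        def _reclaim_queued_deletes(self, context):
            if CONF.reclaim_instance_interval <= 0:
                return  # skipped, exactly as logged above

    manager = ExampleManager(CONF)
    # A timer loop normally drives this; each call runs whichever tasks
    # are due and produces one "Running periodic task ..." line apiece.
    manager.run_periodic_tasks(context=None)
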
[ 973.839015] env[61852]: DEBUG nova.network.neutron [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 973.852245] env[61852]: DEBUG oslo_vmware.api [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293250, 'name': ReconfigVM_Task, 'duration_secs': 0.266193} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.852541] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Reconfigured VM instance instance-0000005c to attach disk [datastore1] df332116-2ae3-4e51-99b0-108921470959/df332116-2ae3-4e51-99b0-108921470959.vmdk or device None with type thin {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 973.852844] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Updating instance 'df332116-2ae3-4e51-99b0-108921470959' progress to 50 {{(pid=61852) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 973.906565] env[61852]: DEBUG nova.network.neutron [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Updating instance_info_cache with network_info: [{"id": "41d20024-17d1-4e43-ad02-a6316dcc9c2f", "address": "fa:16:3e:96:00:1c", "network": {"id": "240e5d63-b796-4cef-9d1f-5d8f8868dea4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1472329620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdac3605118e44a69d44ab56cafe2e21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41d20024-17", "ovs_interfaceid": "41d20024-17d1-4e43-ad02-a6316dcc9c2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.931192] env[61852]: DEBUG nova.policy [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Policy check for network:attach_external_network failed with credentials 
{'is_admin': False, 'user_id': '0f04d129452d4eb79514c52a6972af0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e93a6965a6884292bc56b01f7d54a622', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 973.965266] env[61852]: DEBUG oslo_vmware.api [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Task: {'id': task-1293252, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159381} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.965525] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 973.965709] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Deleted contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 973.965882] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 973.966208] env[61852]: INFO nova.compute.manager [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Took 1.12 seconds to destroy the instance on the hypervisor. [ 973.966378] env[61852]: DEBUG oslo.service.loopingcall [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 973.966578] env[61852]: DEBUG nova.compute.manager [-] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 973.966680] env[61852]: DEBUG nova.network.neutron [-] [instance: b99bacc1-21e7-4bbd-8092-549246500421] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 974.159516] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293255, 'name': CreateVM_Task, 'duration_secs': 0.28028} completed successfully.
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.161027] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 974.161479] env[61852]: DEBUG oslo_concurrency.lockutils [None req-93d550f7-1c5d-4570-83fa-671b9bab11ea tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lock "8d8679db-eb9d-45c1-b053-70378f58e273" "released" by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: held 25.144s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.162696] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.162861] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.163204] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 974.163614] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-648f726b-f113-4858-a10b-e63127977294 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.168573] env[61852]: DEBUG oslo_vmware.api [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Waiting for the task: (returnval){ [ 974.168573] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5256e0eb-e6a8-ecd8-3f32-287d018a9ce7" [ 974.168573] env[61852]: _type = "Task" [ 974.168573] env[61852]: } to complete.
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.175612] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 974.175821] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.175983] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.176181] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61852) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 974.177216] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe31342f-3437-46e6-87af-3af96505df7a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.189635] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-234f04ff-d36a-495c-bfd4-a9535e45ae21 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.195010] env[61852]: DEBUG oslo_vmware.api [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5256e0eb-e6a8-ecd8-3f32-287d018a9ce7, 'name': SearchDatastore_Task, 'duration_secs': 0.015279} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.195556] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.195777] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 974.196022] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.196514] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.196514] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 974.197290] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9afd79ba-f92d-4f83-afb2-64212e83d658 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.207909] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfd372e7-0ba4-4f5d-964f-12d694b3be5a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.214736] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-236036d6-a60e-4e46-badc-560f8aa74500 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.218034] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 974.218216] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 974.218908] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab6703af-d53b-4bdd-8907-0feea9cd1783 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.249402] env[61852]: DEBUG oslo_vmware.api [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Waiting for the task: (returnval){ [ 974.249402] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]527fc18b-8ce2-aef2-3318-fcea02dd1a3b" [ 974.249402] env[61852]: _type = "Task" [ 974.249402] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.250226] env[61852]: DEBUG oslo_concurrency.lockutils [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "refresh_cache-9f39bee8-52b8-426d-9b8a-114e3a6a6343" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.250359] env[61852]: DEBUG oslo_concurrency.lockutils [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquired lock "refresh_cache-9f39bee8-52b8-426d-9b8a-114e3a6a6343" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.250502] env[61852]: DEBUG nova.network.neutron [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 974.251708] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180683MB free_disk=138GB free_vcpus=48 pci_devices=None {{(pid=61852) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 974.251843] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 974.252026] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.265843] env[61852]: DEBUG oslo_vmware.api [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]527fc18b-8ce2-aef2-3318-fcea02dd1a3b, 'name': SearchDatastore_Task, 'duration_secs': 0.010436} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.266712] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-160a4772-6db3-49c0-898d-4db43e4714b9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.272410] env[61852]: DEBUG oslo_vmware.api [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Waiting for the task: (returnval){ [ 974.272410] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52846263-89d8-86c9-4a0d-a0d563195b44" [ 974.272410] env[61852]: _type = "Task" [ 974.272410] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.280268] env[61852]: DEBUG oslo_vmware.api [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52846263-89d8-86c9-4a0d-a0d563195b44, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.359569] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-665c743b-24ce-459c-8da3-2e00b2afe6c4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.379088] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47024562-7566-4ad0-a33b-d6d947f2c51b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.396980] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Updating instance 'df332116-2ae3-4e51-99b0-108921470959' progress to 67 {{(pid=61852) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 974.409474] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Releasing lock "refresh_cache-fb75509e-3cbf-406e-ad2d-aeb51a68295d" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.578489] env[61852]: DEBUG nova.network.neutron [req-73a08a4a-5543-468e-b132-21c8c0f63854 req-111744c2-e28e-4c05-810f-8d3be9eb7650 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Updated VIF entry in instance network info cache for port 6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883. 
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 974.578853] env[61852]: DEBUG nova.network.neutron [req-73a08a4a-5543-468e-b132-21c8c0f63854 req-111744c2-e28e-4c05-810f-8d3be9eb7650 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Updating instance_info_cache with network_info: [{"id": "6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883", "address": "fa:16:3e:b2:2d:44", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ab757ae-eb", "ovs_interfaceid": "6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.782509] env[61852]: DEBUG oslo_vmware.api [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52846263-89d8-86c9-4a0d-a0d563195b44, 'name': SearchDatastore_Task, 'duration_secs': 0.048204} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.783993] env[61852]: DEBUG nova.network.neutron [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 974.785922] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.786229] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] d9715a56-249f-4c19-a55b-730d352248cb/d9715a56-249f-4c19-a55b-730d352248cb.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 974.786631] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-15024bc1-52af-4fcb-8c54-4d0a9a83921c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.795189] env[61852]: DEBUG oslo_vmware.api [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Waiting for the task: (returnval){ [ 974.795189] env[61852]: value = "task-1293256" [ 974.795189] env[61852]: _type = "Task" [ 974.795189] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.805430] env[61852]: DEBUG oslo_vmware.api [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Task: {'id': task-1293256, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.937307] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bed73fe8-23e2-4390-8d70-866fda2c9e45 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.960592] env[61852]: DEBUG nova.network.neutron [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Port 16ad03e7-b72d-4cdd-8da7-5314a7cad855 binding to destination host cpu-1 is already ACTIVE {{(pid=61852) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 974.963919] env[61852]: DEBUG nova.network.neutron [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Updating instance_info_cache with network_info: [{"id": "580ae921-fe6d-4dfb-8b65-4dd88fec14ed", "address": "fa:16:3e:49:75:35", "network": {"id": "66e1ee36-559a-4219-ab11-b6c5d9aeb20e", "bridge": "br-int", "label": "tempest-ServersTestJSON-206536995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4dbb543c66364861bf5f437c8c33a550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2c424c9-6446-4b2a-af8c-4d9c29117c39", "external-id": "nsx-vlan-transportzone-437", "segmentation_id": 437, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap580ae921-fe", "ovs_interfaceid": "580ae921-fe6d-4dfb-8b65-4dd88fec14ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.964181] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ed9954b-75c4-431a-8251-663fa3214ae3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.977024] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Updating instance 'fb75509e-3cbf-406e-ad2d-aeb51a68295d' progress to 83 {{(pid=61852) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 975.081965] env[61852]: DEBUG oslo_concurrency.lockutils [req-73a08a4a-5543-468e-b132-21c8c0f63854 req-111744c2-e28e-4c05-810f-8d3be9eb7650 service nova] Releasing lock "refresh_cache-4623565b-cd36-498c-a0e9-c3b1c6ef479b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.082399] env[61852]: DEBUG nova.compute.manager [req-73a08a4a-5543-468e-b132-21c8c0f63854 req-111744c2-e28e-4c05-810f-8d3be9eb7650 service nova] [instance: 
d58958f2-7b6f-4480-9710-aa9e67ebd37c] Received event network-changed-fb4d01a4-4b0f-4591-aaf9-f8487c4cd460 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 975.082531] env[61852]: DEBUG nova.compute.manager [req-73a08a4a-5543-468e-b132-21c8c0f63854 req-111744c2-e28e-4c05-810f-8d3be9eb7650 service nova] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Refreshing instance network info cache due to event network-changed-fb4d01a4-4b0f-4591-aaf9-f8487c4cd460. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 975.082757] env[61852]: DEBUG oslo_concurrency.lockutils [req-73a08a4a-5543-468e-b132-21c8c0f63854 req-111744c2-e28e-4c05-810f-8d3be9eb7650 service nova] Acquiring lock "refresh_cache-d58958f2-7b6f-4480-9710-aa9e67ebd37c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.083123] env[61852]: DEBUG oslo_concurrency.lockutils [req-73a08a4a-5543-468e-b132-21c8c0f63854 req-111744c2-e28e-4c05-810f-8d3be9eb7650 service nova] Acquired lock "refresh_cache-d58958f2-7b6f-4480-9710-aa9e67ebd37c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.083477] env[61852]: DEBUG nova.network.neutron [req-73a08a4a-5543-468e-b132-21c8c0f63854 req-111744c2-e28e-4c05-810f-8d3be9eb7650 service nova] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Refreshing network info cache for port fb4d01a4-4b0f-4591-aaf9-f8487c4cd460 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 975.224890] env[61852]: DEBUG nova.network.neutron [-] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.263638] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Applying migration context for instance df332116-2ae3-4e51-99b0-108921470959 as it has an incoming, in-progress migration ccb20a30-bdc7-49d6-960d-caf0ea2fe3b6. Migration status is migrating {{(pid=61852) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 975.263878] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Applying migration context for instance fb75509e-3cbf-406e-ad2d-aeb51a68295d as it has an incoming, in-progress migration a0ebf04b-b0d3-4993-a5ba-06cba9c38fe7. Migration status is post-migrating {{(pid=61852) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 975.265379] env[61852]: INFO nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: df332116-2ae3-4e51-99b0-108921470959] Updating resource usage from migration ccb20a30-bdc7-49d6-960d-caf0ea2fe3b6 [ 975.265379] env[61852]: INFO nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Updating resource usage from migration a0ebf04b-b0d3-4993-a5ba-06cba9c38fe7 [ 975.281716] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 4623565b-cd36-498c-a0e9-c3b1c6ef479b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.281883] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance b99bacc1-21e7-4bbd-8092-549246500421 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.282068] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance d58958f2-7b6f-4480-9710-aa9e67ebd37c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.282231] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 8d8679db-eb9d-45c1-b053-70378f58e273 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.282388] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Migration a0ebf04b-b0d3-4993-a5ba-06cba9c38fe7 is active on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 975.282538] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance fb75509e-3cbf-406e-ad2d-aeb51a68295d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.282684] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Migration ccb20a30-bdc7-49d6-960d-caf0ea2fe3b6 is active on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 975.282846] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance df332116-2ae3-4e51-99b0-108921470959 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.283384] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance d9715a56-249f-4c19-a55b-730d352248cb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.283384] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 9f39bee8-52b8-426d-9b8a-114e3a6a6343 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 975.283514] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61852) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 975.283718] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2560MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61852) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 975.307886] env[61852]: DEBUG oslo_vmware.api [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Task: {'id': task-1293256, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.464836} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.307886] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] d9715a56-249f-4c19-a55b-730d352248cb/d9715a56-249f-4c19-a55b-730d352248cb.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 975.308098] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 975.308246] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-27cc6b1e-b285-4ea6-9ec3-f3c7bd263e82 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.314397] env[61852]: DEBUG oslo_vmware.api [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Waiting for the task: (returnval){ [ 975.314397] env[61852]: value = "task-1293257" [ 975.314397] env[61852]: _type = "Task" [ 975.314397] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.321648] env[61852]: DEBUG oslo_vmware.api [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Task: {'id': task-1293257, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.413990] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9236efc5-bf56-4c1d-9a33-522de0ce4348 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.423464] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ab124a-deca-4754-a7d0-9db677f7c32c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.459476] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30aa0a39-2a36-486b-be1d-fa55b3788fbc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.466796] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afca71bc-9593-40d5-9754-7bb9b842218d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.474272] env[61852]: DEBUG oslo_concurrency.lockutils [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Releasing lock "refresh_cache-9f39bee8-52b8-426d-9b8a-114e3a6a6343" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.474542] env[61852]: DEBUG nova.compute.manager [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Instance network_info: |[{"id": "580ae921-fe6d-4dfb-8b65-4dd88fec14ed", "address": "fa:16:3e:49:75:35", "network": {"id": "66e1ee36-559a-4219-ab11-b6c5d9aeb20e", "bridge": "br-int", "label": "tempest-ServersTestJSON-206536995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4dbb543c66364861bf5f437c8c33a550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2c424c9-6446-4b2a-af8c-4d9c29117c39", "external-id": "nsx-vlan-transportzone-437", "segmentation_id": 437, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap580ae921-fe", "ovs_interfaceid": "580ae921-fe6d-4dfb-8b65-4dd88fec14ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 975.474927] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:49:75:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f2c424c9-6446-4b2a-af8c-4d9c29117c39', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': '580ae921-fe6d-4dfb-8b65-4dd88fec14ed', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 975.482464] env[61852]: DEBUG oslo.service.loopingcall [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 975.486115] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 975.486115] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 975.493328] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8436e349-08b7-47ae-bf8a-ffc253cb5392 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.495167] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1a57296e-c37b-4122-8b92-b73a7fa68904 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.510538] env[61852]: DEBUG nova.compute.provider_tree [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 975.517236] env[61852]: DEBUG oslo_vmware.api [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 975.517236] env[61852]: value = "task-1293258" [ 975.517236] env[61852]: _type = "Task" [ 975.517236] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.522023] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 975.522023] env[61852]: value = "task-1293259" [ 975.522023] env[61852]: _type = "Task" [ 975.522023] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.530264] env[61852]: DEBUG oslo_vmware.api [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293258, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.533559] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293259, 'name': CreateVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.729710] env[61852]: INFO nova.compute.manager [-] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Took 1.76 seconds to deallocate network for instance. 
[ 975.773018] env[61852]: DEBUG nova.compute.manager [req-776c742b-1b06-4a76-be80-ff7e6297e656 req-61e3958b-4bdb-4689-9315-22dbfb6ac9ed service nova] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Received event network-changed-580ae921-fe6d-4dfb-8b65-4dd88fec14ed {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 975.773278] env[61852]: DEBUG nova.compute.manager [req-776c742b-1b06-4a76-be80-ff7e6297e656 req-61e3958b-4bdb-4689-9315-22dbfb6ac9ed service nova] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Refreshing instance network info cache due to event network-changed-580ae921-fe6d-4dfb-8b65-4dd88fec14ed. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 975.773500] env[61852]: DEBUG oslo_concurrency.lockutils [req-776c742b-1b06-4a76-be80-ff7e6297e656 req-61e3958b-4bdb-4689-9315-22dbfb6ac9ed service nova] Acquiring lock "refresh_cache-9f39bee8-52b8-426d-9b8a-114e3a6a6343" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.773685] env[61852]: DEBUG oslo_concurrency.lockutils [req-776c742b-1b06-4a76-be80-ff7e6297e656 req-61e3958b-4bdb-4689-9315-22dbfb6ac9ed service nova] Acquired lock "refresh_cache-9f39bee8-52b8-426d-9b8a-114e3a6a6343" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.773872] env[61852]: DEBUG nova.network.neutron [req-776c742b-1b06-4a76-be80-ff7e6297e656 req-61e3958b-4bdb-4689-9315-22dbfb6ac9ed service nova] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Refreshing network info cache for port 580ae921-fe6d-4dfb-8b65-4dd88fec14ed {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 975.800513] env[61852]: DEBUG nova.network.neutron [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Successfully updated port: 825f3034-375c-417d-9d76-971f3239ff59 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 975.825554] env[61852]: DEBUG oslo_vmware.api [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Task: {'id': task-1293257, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06132} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.826371] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 975.826612] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f83e61-b55b-4266-aff2-7479cee6bcfa {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.848840] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] d9715a56-249f-4c19-a55b-730d352248cb/d9715a56-249f-4c19-a55b-730d352248cb.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 975.851872] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d89d18a-d1f6-4b01-9176-a42a75ae0ea5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.873107] env[61852]: DEBUG oslo_vmware.api [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Waiting for the task: (returnval){ [ 975.873107] env[61852]: value = "task-1293260" [ 975.873107] env[61852]: _type = "Task" [ 975.873107] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.882040] env[61852]: DEBUG oslo_vmware.api [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Task: {'id': task-1293260, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.944444] env[61852]: DEBUG nova.network.neutron [req-73a08a4a-5543-468e-b132-21c8c0f63854 req-111744c2-e28e-4c05-810f-8d3be9eb7650 service nova] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Updated VIF entry in instance network info cache for port fb4d01a4-4b0f-4591-aaf9-f8487c4cd460. 
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 975.944925] env[61852]: DEBUG nova.network.neutron [req-73a08a4a-5543-468e-b132-21c8c0f63854 req-111744c2-e28e-4c05-810f-8d3be9eb7650 service nova] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Updating instance_info_cache with network_info: [{"id": "fb4d01a4-4b0f-4591-aaf9-f8487c4cd460", "address": "fa:16:3e:fe:46:c7", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb4d01a4-4b", "ovs_interfaceid": "fb4d01a4-4b0f-4591-aaf9-f8487c4cd460", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.998751] env[61852]: DEBUG oslo_concurrency.lockutils [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "df332116-2ae3-4e51-99b0-108921470959-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.999018] env[61852]: DEBUG oslo_concurrency.lockutils [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "df332116-2ae3-4e51-99b0-108921470959-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.999226] env[61852]: DEBUG oslo_concurrency.lockutils [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "df332116-2ae3-4e51-99b0-108921470959-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.013551] env[61852]: DEBUG nova.scheduler.client.report [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 976.031639] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293259, 'name': CreateVM_Task} progress is 99%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.034924] env[61852]: DEBUG oslo_vmware.api [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293258, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.157075] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1639938-5458-4a10-8c3a-9e3d6b9f523e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.164357] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-217e913b-b29f-418b-8f60-752503093555 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Suspending the VM {{(pid=61852) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 976.164663] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-de4fea71-670e-4ec0-8ae1-7c07b950e6d5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.170730] env[61852]: DEBUG oslo_vmware.api [None req-217e913b-b29f-418b-8f60-752503093555 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){ [ 976.170730] env[61852]: value = "task-1293261" [ 976.170730] env[61852]: _type = "Task" [ 976.170730] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.178604] env[61852]: DEBUG oslo_vmware.api [None req-217e913b-b29f-418b-8f60-752503093555 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293261, 'name': SuspendVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.237219] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.306344] env[61852]: DEBUG oslo_concurrency.lockutils [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "refresh_cache-d58958f2-7b6f-4480-9710-aa9e67ebd37c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 976.385513] env[61852]: DEBUG oslo_vmware.api [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Task: {'id': task-1293260, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.448366] env[61852]: DEBUG oslo_concurrency.lockutils [req-73a08a4a-5543-468e-b132-21c8c0f63854 req-111744c2-e28e-4c05-810f-8d3be9eb7650 service nova] Releasing lock "refresh_cache-d58958f2-7b6f-4480-9710-aa9e67ebd37c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 976.448682] env[61852]: DEBUG nova.compute.manager [req-73a08a4a-5543-468e-b132-21c8c0f63854 req-111744c2-e28e-4c05-810f-8d3be9eb7650 service nova] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Received event network-changed-a83d344f-834c-41d9-bb8f-27ebb900a57e {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 976.448865] env[61852]: DEBUG nova.compute.manager [req-73a08a4a-5543-468e-b132-21c8c0f63854 req-111744c2-e28e-4c05-810f-8d3be9eb7650 service nova] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Refreshing instance network info cache due to event network-changed-a83d344f-834c-41d9-bb8f-27ebb900a57e. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 976.449091] env[61852]: DEBUG oslo_concurrency.lockutils [req-73a08a4a-5543-468e-b132-21c8c0f63854 req-111744c2-e28e-4c05-810f-8d3be9eb7650 service nova] Acquiring lock "refresh_cache-d9715a56-249f-4c19-a55b-730d352248cb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 976.449240] env[61852]: DEBUG oslo_concurrency.lockutils [req-73a08a4a-5543-468e-b132-21c8c0f63854 req-111744c2-e28e-4c05-810f-8d3be9eb7650 service nova] Acquired lock "refresh_cache-d9715a56-249f-4c19-a55b-730d352248cb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.449404] env[61852]: DEBUG nova.network.neutron [req-73a08a4a-5543-468e-b132-21c8c0f63854 req-111744c2-e28e-4c05-810f-8d3be9eb7650 service nova] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Refreshing network info cache for port a83d344f-834c-41d9-bb8f-27ebb900a57e {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 976.450777] env[61852]: DEBUG oslo_concurrency.lockutils [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquired lock "refresh_cache-d58958f2-7b6f-4480-9710-aa9e67ebd37c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.450964] env[61852]: DEBUG nova.network.neutron [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 976.487861] env[61852]: DEBUG nova.network.neutron [req-776c742b-1b06-4a76-be80-ff7e6297e656 req-61e3958b-4bdb-4689-9315-22dbfb6ac9ed service nova] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Updated VIF entry in instance network info cache for port 580ae921-fe6d-4dfb-8b65-4dd88fec14ed. 
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 976.488190] env[61852]: DEBUG nova.network.neutron [req-776c742b-1b06-4a76-be80-ff7e6297e656 req-61e3958b-4bdb-4689-9315-22dbfb6ac9ed service nova] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Updating instance_info_cache with network_info: [{"id": "580ae921-fe6d-4dfb-8b65-4dd88fec14ed", "address": "fa:16:3e:49:75:35", "network": {"id": "66e1ee36-559a-4219-ab11-b6c5d9aeb20e", "bridge": "br-int", "label": "tempest-ServersTestJSON-206536995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4dbb543c66364861bf5f437c8c33a550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2c424c9-6446-4b2a-af8c-4d9c29117c39", "external-id": "nsx-vlan-transportzone-437", "segmentation_id": 437, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap580ae921-fe", "ovs_interfaceid": "580ae921-fe6d-4dfb-8b65-4dd88fec14ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.517990] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61852) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 976.518315] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.266s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.518952] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.282s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.519194] env[61852]: DEBUG nova.objects.instance [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Lazy-loading 'resources' on Instance uuid b99bacc1-21e7-4bbd-8092-549246500421 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 976.536263] env[61852]: DEBUG oslo_vmware.api [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293258, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.540564] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293259, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.680283] env[61852]: DEBUG oslo_vmware.api [None req-217e913b-b29f-418b-8f60-752503093555 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293261, 'name': SuspendVM_Task} progress is 87%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.885055] env[61852]: DEBUG oslo_vmware.api [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Task: {'id': task-1293260, 'name': ReconfigVM_Task, 'duration_secs': 0.592295} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.885309] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Reconfigured VM instance instance-0000005d to attach disk [datastore2] d9715a56-249f-4c19-a55b-730d352248cb/d9715a56-249f-4c19-a55b-730d352248cb.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 976.885962] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2095e83f-e6e2-40ba-a44d-8ccea6722797 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.892893] env[61852]: DEBUG oslo_vmware.api [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Waiting for the task: (returnval){ [ 976.892893] env[61852]: value = "task-1293262" [ 976.892893] env[61852]: _type = "Task" [ 976.892893] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.901223] env[61852]: DEBUG oslo_vmware.api [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Task: {'id': task-1293262, 'name': Rename_Task} progress is 5%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.991205] env[61852]: DEBUG oslo_concurrency.lockutils [req-776c742b-1b06-4a76-be80-ff7e6297e656 req-61e3958b-4bdb-4689-9315-22dbfb6ac9ed service nova] Releasing lock "refresh_cache-9f39bee8-52b8-426d-9b8a-114e3a6a6343" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 976.991554] env[61852]: DEBUG nova.compute.manager [req-776c742b-1b06-4a76-be80-ff7e6297e656 req-61e3958b-4bdb-4689-9315-22dbfb6ac9ed service nova] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Received event network-vif-deleted-2c88ebc1-acb3-41ba-8d38-8647755ce777 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 976.991803] env[61852]: DEBUG nova.compute.manager [req-776c742b-1b06-4a76-be80-ff7e6297e656 req-61e3958b-4bdb-4689-9315-22dbfb6ac9ed service nova] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Received event network-vif-plugged-825f3034-375c-417d-9d76-971f3239ff59 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 976.992059] env[61852]: DEBUG oslo_concurrency.lockutils [req-776c742b-1b06-4a76-be80-ff7e6297e656 req-61e3958b-4bdb-4689-9315-22dbfb6ac9ed service nova] Acquiring lock "d58958f2-7b6f-4480-9710-aa9e67ebd37c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.992327] env[61852]: DEBUG oslo_concurrency.lockutils [req-776c742b-1b06-4a76-be80-ff7e6297e656 req-61e3958b-4bdb-4689-9315-22dbfb6ac9ed service nova] Lock "d58958f2-7b6f-4480-9710-aa9e67ebd37c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.992546] env[61852]: DEBUG oslo_concurrency.lockutils [req-776c742b-1b06-4a76-be80-ff7e6297e656 req-61e3958b-4bdb-4689-9315-22dbfb6ac9ed service nova] Lock "d58958f2-7b6f-4480-9710-aa9e67ebd37c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.992725] env[61852]: DEBUG nova.compute.manager [req-776c742b-1b06-4a76-be80-ff7e6297e656 req-61e3958b-4bdb-4689-9315-22dbfb6ac9ed service nova] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] No waiting events found dispatching network-vif-plugged-825f3034-375c-417d-9d76-971f3239ff59 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 976.992897] env[61852]: WARNING nova.compute.manager [req-776c742b-1b06-4a76-be80-ff7e6297e656 req-61e3958b-4bdb-4689-9315-22dbfb6ac9ed service nova] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Received unexpected event network-vif-plugged-825f3034-375c-417d-9d76-971f3239ff59 for instance with vm_state active and task_state None. [ 977.006493] env[61852]: WARNING nova.network.neutron [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] d984a6fb-5f5f-4678-bc8a-3723c26f290a already exists in list: networks containing: ['d984a6fb-5f5f-4678-bc8a-3723c26f290a']. 
ignoring it [ 977.032069] env[61852]: DEBUG oslo_vmware.api [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293258, 'name': PowerOnVM_Task, 'duration_secs': 1.320771} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.035458] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 977.035664] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5fa4403e-9723-4425-92ec-623818e8d2a0 tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Updating instance 'fb75509e-3cbf-406e-ad2d-aeb51a68295d' progress to 100 {{(pid=61852) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 977.044861] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293259, 'name': CreateVM_Task, 'duration_secs': 1.368177} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.044861] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 977.045501] env[61852]: DEBUG oslo_concurrency.lockutils [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 977.045629] env[61852]: DEBUG oslo_concurrency.lockutils [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.045921] env[61852]: DEBUG oslo_concurrency.lockutils [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 977.046214] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3268f325-19df-45b4-87f0-d934dd36f16b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.053948] env[61852]: DEBUG oslo_vmware.api [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 977.053948] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52f706c8-a0ec-2440-82e7-0ec72f268c21" [ 977.053948] env[61852]: _type = "Task" [ 977.053948] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.065684] env[61852]: DEBUG oslo_vmware.api [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52f706c8-a0ec-2440-82e7-0ec72f268c21, 'name': SearchDatastore_Task, 'duration_secs': 0.009584} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.065998] env[61852]: DEBUG oslo_concurrency.lockutils [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 977.066300] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 977.066551] env[61852]: DEBUG oslo_concurrency.lockutils [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 977.066699] env[61852]: DEBUG oslo_concurrency.lockutils [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.066884] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 977.069886] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-14cc20d6-7427-4ec7-ba02-bed2cb2471d7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.073190] env[61852]: DEBUG oslo_concurrency.lockutils [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "refresh_cache-df332116-2ae3-4e51-99b0-108921470959" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 977.073281] env[61852]: DEBUG oslo_concurrency.lockutils [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired lock "refresh_cache-df332116-2ae3-4e51-99b0-108921470959" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
977.073582] env[61852]: DEBUG nova.network.neutron [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 977.084536] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 977.084842] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 977.085599] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0a15984-bfcd-4e65-929a-8dc0ce3cc267 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.094676] env[61852]: DEBUG oslo_vmware.api [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 977.094676] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5238aa4b-60ba-f5b3-b7e6-cddeb21096ab" [ 977.094676] env[61852]: _type = "Task" [ 977.094676] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.104130] env[61852]: DEBUG oslo_vmware.api [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5238aa4b-60ba-f5b3-b7e6-cddeb21096ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.187427] env[61852]: DEBUG oslo_vmware.api [None req-217e913b-b29f-418b-8f60-752503093555 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293261, 'name': SuspendVM_Task, 'duration_secs': 0.938645} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.187700] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-217e913b-b29f-418b-8f60-752503093555 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Suspended the VM {{(pid=61852) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 977.187880] env[61852]: DEBUG nova.compute.manager [None req-217e913b-b29f-418b-8f60-752503093555 tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 977.188666] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74244c73-70c7-4a90-af8d-19620db6e4e9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.192696] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1179e964-e137-4281-a5ad-c9f36a1ec799 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.203379] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d369372b-eff8-42c3-bb86-ae1cc20ec4b7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.243246] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f6ebdc7-7d03-4ebd-9a2e-8041e4c5631d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.251603] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88e62660-e6fb-48a4-9a93-553ae2c0870d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.265288] env[61852]: DEBUG nova.compute.provider_tree [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 977.367467] env[61852]: DEBUG nova.network.neutron [req-73a08a4a-5543-468e-b132-21c8c0f63854 req-111744c2-e28e-4c05-810f-8d3be9eb7650 service nova] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Updated VIF entry in instance network info cache for port a83d344f-834c-41d9-bb8f-27ebb900a57e. 
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 977.367819] env[61852]: DEBUG nova.network.neutron [req-73a08a4a-5543-468e-b132-21c8c0f63854 req-111744c2-e28e-4c05-810f-8d3be9eb7650 service nova] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Updating instance_info_cache with network_info: [{"id": "a83d344f-834c-41d9-bb8f-27ebb900a57e", "address": "fa:16:3e:46:47:86", "network": {"id": "9c679167-5a23-4eb7-a416-0af1b125c656", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1380673912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cad1538cc4c54d7582daffda576baacf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "744515ee-aa5b-4c23-b959-b56c51da6b86", "external-id": "nsx-vlan-transportzone-310", "segmentation_id": 310, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa83d344f-83", "ovs_interfaceid": "a83d344f-834c-41d9-bb8f-27ebb900a57e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.402576] env[61852]: DEBUG oslo_vmware.api [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Task: {'id': task-1293262, 'name': Rename_Task, 'duration_secs': 0.190146} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.402846] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 977.403099] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb9471f4-838a-4d45-b676-a3dced8f8235 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.409490] env[61852]: DEBUG oslo_vmware.api [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Waiting for the task: (returnval){ [ 977.409490] env[61852]: value = "task-1293263" [ 977.409490] env[61852]: _type = "Task" [ 977.409490] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.419699] env[61852]: DEBUG oslo_vmware.api [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Task: {'id': task-1293263, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.449580] env[61852]: DEBUG nova.network.neutron [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Updating instance_info_cache with network_info: [{"id": "fb4d01a4-4b0f-4591-aaf9-f8487c4cd460", "address": "fa:16:3e:fe:46:c7", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb4d01a4-4b", "ovs_interfaceid": "fb4d01a4-4b0f-4591-aaf9-f8487c4cd460", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "825f3034-375c-417d-9d76-971f3239ff59", "address": "fa:16:3e:a6:a4:09", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap825f3034-37", "ovs_interfaceid": "825f3034-375c-417d-9d76-971f3239ff59", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.604407] env[61852]: DEBUG oslo_vmware.api [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5238aa4b-60ba-f5b3-b7e6-cddeb21096ab, 'name': SearchDatastore_Task, 'duration_secs': 0.008829} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.607229] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7454f54c-663f-40de-a2a2-9c4a86b6b69d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.612474] env[61852]: DEBUG oslo_vmware.api [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 977.612474] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52588162-3450-5e45-cf1b-18c9630097dd" [ 977.612474] env[61852]: _type = "Task" [ 977.612474] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.621233] env[61852]: DEBUG oslo_vmware.api [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52588162-3450-5e45-cf1b-18c9630097dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.764222] env[61852]: DEBUG nova.network.neutron [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Updating instance_info_cache with network_info: [{"id": "16ad03e7-b72d-4cdd-8da7-5314a7cad855", "address": "fa:16:3e:06:57:79", "network": {"id": "37c975fc-d484-4e07-82b4-dc10db3dab61", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2132613748-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14a017ea2b084ae0ad2994dda7809c7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16ad03e7-b7", "ovs_interfaceid": "16ad03e7-b72d-4cdd-8da7-5314a7cad855", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.768583] env[61852]: DEBUG nova.scheduler.client.report [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 977.797552] env[61852]: DEBUG nova.compute.manager [req-ab4be45e-6e2a-452d-91a4-a7935c0f776b req-03a64e70-9002-4624-8ffb-d941b4f18927 service nova] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Received event network-changed-825f3034-375c-417d-9d76-971f3239ff59 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 977.797738] env[61852]: DEBUG nova.compute.manager [req-ab4be45e-6e2a-452d-91a4-a7935c0f776b req-03a64e70-9002-4624-8ffb-d941b4f18927 service nova] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Refreshing instance network info cache due to event network-changed-825f3034-375c-417d-9d76-971f3239ff59. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 977.797896] env[61852]: DEBUG oslo_concurrency.lockutils [req-ab4be45e-6e2a-452d-91a4-a7935c0f776b req-03a64e70-9002-4624-8ffb-d941b4f18927 service nova] Acquiring lock "refresh_cache-d58958f2-7b6f-4480-9710-aa9e67ebd37c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 977.870136] env[61852]: DEBUG oslo_concurrency.lockutils [req-73a08a4a-5543-468e-b132-21c8c0f63854 req-111744c2-e28e-4c05-810f-8d3be9eb7650 service nova] Releasing lock "refresh_cache-d9715a56-249f-4c19-a55b-730d352248cb" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 977.870421] env[61852]: DEBUG nova.compute.manager [req-73a08a4a-5543-468e-b132-21c8c0f63854 req-111744c2-e28e-4c05-810f-8d3be9eb7650 service nova] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Received event network-vif-plugged-580ae921-fe6d-4dfb-8b65-4dd88fec14ed {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 977.870625] env[61852]: DEBUG oslo_concurrency.lockutils [req-73a08a4a-5543-468e-b132-21c8c0f63854 req-111744c2-e28e-4c05-810f-8d3be9eb7650 service nova] Acquiring lock "9f39bee8-52b8-426d-9b8a-114e3a6a6343-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.870852] env[61852]: DEBUG oslo_concurrency.lockutils [req-73a08a4a-5543-468e-b132-21c8c0f63854 req-111744c2-e28e-4c05-810f-8d3be9eb7650 service nova] Lock "9f39bee8-52b8-426d-9b8a-114e3a6a6343-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.871027] env[61852]: DEBUG oslo_concurrency.lockutils [req-73a08a4a-5543-468e-b132-21c8c0f63854 req-111744c2-e28e-4c05-810f-8d3be9eb7650 service nova] Lock "9f39bee8-52b8-426d-9b8a-114e3a6a6343-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.871202] env[61852]: DEBUG nova.compute.manager [req-73a08a4a-5543-468e-b132-21c8c0f63854 req-111744c2-e28e-4c05-810f-8d3be9eb7650 service nova] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] No waiting events found dispatching network-vif-plugged-580ae921-fe6d-4dfb-8b65-4dd88fec14ed {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 977.871372] env[61852]: WARNING nova.compute.manager [req-73a08a4a-5543-468e-b132-21c8c0f63854 
req-111744c2-e28e-4c05-810f-8d3be9eb7650 service nova] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Received unexpected event network-vif-plugged-580ae921-fe6d-4dfb-8b65-4dd88fec14ed for instance with vm_state building and task_state spawning. [ 977.920752] env[61852]: DEBUG oslo_vmware.api [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Task: {'id': task-1293263, 'name': PowerOnVM_Task, 'duration_secs': 0.44806} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.921020] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 977.921243] env[61852]: INFO nova.compute.manager [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Took 6.80 seconds to spawn the instance on the hypervisor. [ 977.921485] env[61852]: DEBUG nova.compute.manager [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 977.922286] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09ad34d2-133d-4fae-9b70-156b0f83f374 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.952829] env[61852]: DEBUG oslo_concurrency.lockutils [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Releasing lock "refresh_cache-d58958f2-7b6f-4480-9710-aa9e67ebd37c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 977.953519] env[61852]: DEBUG oslo_concurrency.lockutils [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "d58958f2-7b6f-4480-9710-aa9e67ebd37c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 977.953683] env[61852]: DEBUG oslo_concurrency.lockutils [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquired lock "d58958f2-7b6f-4480-9710-aa9e67ebd37c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.953967] env[61852]: DEBUG oslo_concurrency.lockutils [req-ab4be45e-6e2a-452d-91a4-a7935c0f776b req-03a64e70-9002-4624-8ffb-d941b4f18927 service nova] Acquired lock "refresh_cache-d58958f2-7b6f-4480-9710-aa9e67ebd37c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.954168] env[61852]: DEBUG nova.network.neutron [req-ab4be45e-6e2a-452d-91a4-a7935c0f776b req-03a64e70-9002-4624-8ffb-d941b4f18927 service nova] [instance: 
d58958f2-7b6f-4480-9710-aa9e67ebd37c] Refreshing network info cache for port 825f3034-375c-417d-9d76-971f3239ff59 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 977.955879] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-993d075e-79d0-45f3-bb76-635e8ec8d9d5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.974552] env[61852]: DEBUG nova.virt.hardware [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 977.974706] env[61852]: DEBUG nova.virt.hardware [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 977.974810] env[61852]: DEBUG nova.virt.hardware [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 977.974986] env[61852]: DEBUG nova.virt.hardware [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 977.975160] env[61852]: DEBUG nova.virt.hardware [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 977.975317] env[61852]: DEBUG nova.virt.hardware [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 977.975526] env[61852]: DEBUG nova.virt.hardware [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 977.975689] env[61852]: DEBUG nova.virt.hardware [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 
tempest-AttachInterfacesTestJSON-673373864-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 977.975864] env[61852]: DEBUG nova.virt.hardware [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 977.976119] env[61852]: DEBUG nova.virt.hardware [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 977.976359] env[61852]: DEBUG nova.virt.hardware [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 977.982629] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Reconfiguring VM to attach interface {{(pid=61852) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 977.983257] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2fbda48a-5eb5-4b36-8538-a3b9d355ea5f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.000717] env[61852]: DEBUG oslo_vmware.api [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 978.000717] env[61852]: value = "task-1293264" [ 978.000717] env[61852]: _type = "Task" [ 978.000717] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.008665] env[61852]: DEBUG oslo_vmware.api [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293264, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.123054] env[61852]: DEBUG oslo_vmware.api [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52588162-3450-5e45-cf1b-18c9630097dd, 'name': SearchDatastore_Task, 'duration_secs': 0.009502} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.123304] env[61852]: DEBUG oslo_concurrency.lockutils [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 978.123564] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] 9f39bee8-52b8-426d-9b8a-114e3a6a6343/9f39bee8-52b8-426d-9b8a-114e3a6a6343.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 978.123821] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-516cda0f-9354-4822-9b2c-d97e3b9cc124 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.130612] env[61852]: DEBUG oslo_vmware.api [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 978.130612] env[61852]: value = "task-1293265" [ 978.130612] env[61852]: _type = "Task" [ 978.130612] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.140934] env[61852]: DEBUG oslo_vmware.api [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293265, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.266849] env[61852]: DEBUG oslo_concurrency.lockutils [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Releasing lock "refresh_cache-df332116-2ae3-4e51-99b0-108921470959" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 978.272752] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.754s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.297364] env[61852]: INFO nova.scheduler.client.report [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Deleted allocations for instance b99bacc1-21e7-4bbd-8092-549246500421 [ 978.440025] env[61852]: INFO nova.compute.manager [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Took 14.01 seconds to build instance. 
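The Rename_Task, PowerOnVM_Task, SearchDatastore_Task and CopyVirtualDisk_Task records above (task-1293262 through task-1293265) are all produced by one poll loop: wait_for_task (oslo_vmware/api.py:397) submits the vCenter call, _poll_task logs "progress is N%" on each tick, and the closing record adds duration_secs once the task reaches success. A minimal sketch of that loop, with a FakeVim stub standing in for the real vSphere session (the stub and poll interval are illustrative assumptions; the production poller is oslo_vmware.api.VMwareAPISession.wait_for_task):

```python
import itertools
import time

class FakeVim:
    """Stand-in for the vSphere session: yields canned task states."""
    def __init__(self):
        self._states = itertools.chain(
            [("running", 0), ("running", 77)],
            itertools.repeat(("success", 100)))

    def task_info(self, task_ref):
        return next(self._states)

def wait_for_task(vim, task_ref, poll_interval=0.1):
    """Poll a task to completion, mimicking the log records above."""
    start = time.monotonic()
    while True:
        state, progress = vim.task_info(task_ref)
        if state == "success":
            print("Task: {'id': %s, 'duration_secs': %.6f} completed "
                  "successfully." % (task_ref, time.monotonic() - start))
            return
        if state == "error":
            raise RuntimeError("task %s failed" % task_ref)
        print("Task: {'id': %s} progress is %d%%." % (task_ref, progress))
        time.sleep(poll_interval)

wait_for_task(FakeVim(), "task-1293265")
```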
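The "Inventory has not changed for provider f818062c-..." record earlier in this stretch carries the inventory that nova-compute reports to placement. Placement treats (total - reserved) * allocation_ratio as the allocatable capacity per resource class; checking that arithmetic against the logged values (a sketch of the formula only, not placement's code):

```python
# Inventory as logged for provider f818062c-7b17-4bd0-94af-192a674543c3.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    allocatable = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print("%s: %g allocatable" % (rc, allocatable))
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```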
[ 978.510620] env[61852]: DEBUG oslo_vmware.api [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293264, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.641758] env[61852]: DEBUG oslo_vmware.api [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293265, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.677060] env[61852]: INFO nova.compute.manager [None req-c82444cf-0189-41a9-978c-cbb0b74adbbe tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Resuming [ 978.677475] env[61852]: DEBUG nova.objects.instance [None req-c82444cf-0189-41a9-978c-cbb0b74adbbe tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lazy-loading 'flavor' on Instance uuid 8d8679db-eb9d-45c1-b053-70378f58e273 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 978.683751] env[61852]: DEBUG nova.network.neutron [req-ab4be45e-6e2a-452d-91a4-a7935c0f776b req-03a64e70-9002-4624-8ffb-d941b4f18927 service nova] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Updated VIF entry in instance network info cache for port 825f3034-375c-417d-9d76-971f3239ff59. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 978.684265] env[61852]: DEBUG nova.network.neutron [req-ab4be45e-6e2a-452d-91a4-a7935c0f776b req-03a64e70-9002-4624-8ffb-d941b4f18927 service nova] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Updating instance_info_cache with network_info: [{"id": "fb4d01a4-4b0f-4591-aaf9-f8487c4cd460", "address": "fa:16:3e:fe:46:c7", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb4d01a4-4b", "ovs_interfaceid": "fb4d01a4-4b0f-4591-aaf9-f8487c4cd460", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "825f3034-375c-417d-9d76-971f3239ff59", "address": "fa:16:3e:a6:a4:09", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", 
"version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap825f3034-37", "ovs_interfaceid": "825f3034-375c-417d-9d76-971f3239ff59", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.789293] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4a65e35-a16e-4f90-9074-449fb2b8e8f2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.812833] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70107b3f-901f-481b-8d41-b9ad59f85ae2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.816243] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c278dc1f-a83b-4315-a6fc-7bcdf46d05c9 tempest-ServersTestFqdnHostnames-2043379552 tempest-ServersTestFqdnHostnames-2043379552-project-member] Lock "b99bacc1-21e7-4bbd-8092-549246500421" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.973s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.822411] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Updating instance 'df332116-2ae3-4e51-99b0-108921470959' progress to 83 {{(pid=61852) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 978.941365] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5baa06e3-7340-4719-a2e2-121e9ad78ff9 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Lock "d9715a56-249f-4c19-a55b-730d352248cb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.516s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.957233] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "fb75509e-3cbf-406e-ad2d-aeb51a68295d" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.957512] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "fb75509e-3cbf-406e-ad2d-aeb51a68295d" acquired 
by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.957714] env[61852]: DEBUG nova.compute.manager [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Going to confirm migration 2 {{(pid=61852) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 979.010455] env[61852]: DEBUG oslo_vmware.api [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293264, 'name': ReconfigVM_Task, 'duration_secs': 0.979875} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.011041] env[61852]: DEBUG oslo_concurrency.lockutils [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Releasing lock "d58958f2-7b6f-4480-9710-aa9e67ebd37c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 979.011252] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Reconfigured VM to attach interface {{(pid=61852) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 979.141199] env[61852]: DEBUG oslo_vmware.api [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293265, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.518366} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.141467] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] 9f39bee8-52b8-426d-9b8a-114e3a6a6343/9f39bee8-52b8-426d-9b8a-114e3a6a6343.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 979.141634] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 979.141877] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-025ff2b6-3226-42c0-9ab3-dce0836133f4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.148794] env[61852]: DEBUG oslo_vmware.api [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 979.148794] env[61852]: value = "task-1293266" [ 979.148794] env[61852]: _type = "Task" [ 979.148794] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.159338] env[61852]: DEBUG oslo_vmware.api [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293266, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.186893] env[61852]: DEBUG oslo_concurrency.lockutils [req-ab4be45e-6e2a-452d-91a4-a7935c0f776b req-03a64e70-9002-4624-8ffb-d941b4f18927 service nova] Releasing lock "refresh_cache-d58958f2-7b6f-4480-9710-aa9e67ebd37c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 979.331460] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 979.331700] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c7287aa6-0ba9-429c-8c45-f9c1adb69ff6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.338469] env[61852]: DEBUG oslo_vmware.api [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 979.338469] env[61852]: value = "task-1293267" [ 979.338469] env[61852]: _type = "Task" [ 979.338469] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.354387] env[61852]: DEBUG oslo_vmware.api [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293267, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.516640] env[61852]: DEBUG oslo_concurrency.lockutils [None req-f11b8290-3e40-4075-82bf-9957759356ab tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "interface-d58958f2-7b6f-4480-9710-aa9e67ebd37c-825f3034-375c-417d-9d76-971f3239ff59" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.790s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.554688] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "refresh_cache-fb75509e-3cbf-406e-ad2d-aeb51a68295d" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 979.554879] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquired lock "refresh_cache-fb75509e-3cbf-406e-ad2d-aeb51a68295d" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.555069] env[61852]: DEBUG nova.network.neutron [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 979.555278] env[61852]: DEBUG nova.objects.instance [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lazy-loading 'info_cache' on Instance uuid fb75509e-3cbf-406e-ad2d-aeb51a68295d {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 979.658887] env[61852]: DEBUG oslo_vmware.api [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293266, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.127481} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.659166] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 979.659908] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d8855a6-c9f2-47a9-b897-68b5ba3638bc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.681066] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] 9f39bee8-52b8-426d-9b8a-114e3a6a6343/9f39bee8-52b8-426d-9b8a-114e3a6a6343.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 979.681561] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85d51079-b7c4-4b93-af84-7ab29b92d84f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.695871] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c82444cf-0189-41a9-978c-cbb0b74adbbe tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquiring lock "refresh_cache-8d8679db-eb9d-45c1-b053-70378f58e273" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 979.696046] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c82444cf-0189-41a9-978c-cbb0b74adbbe tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquired lock "refresh_cache-8d8679db-eb9d-45c1-b053-70378f58e273" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.696247] env[61852]: DEBUG nova.network.neutron [None req-c82444cf-0189-41a9-978c-cbb0b74adbbe tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 979.705092] env[61852]: DEBUG oslo_vmware.api [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 979.705092] env[61852]: value = "task-1293268" [ 979.705092] env[61852]: _type = "Task" [ 979.705092] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.713897] env[61852]: DEBUG oslo_vmware.api [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293268, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.848410] env[61852]: DEBUG oslo_vmware.api [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293267, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.214533] env[61852]: DEBUG oslo_vmware.api [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293268, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.349416] env[61852]: DEBUG oslo_vmware.api [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293267, 'name': PowerOnVM_Task, 'duration_secs': 0.825838} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.349811] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 980.350824] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-93fa6850-bd2a-4a29-8daa-9edbd81c5535 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Updating instance 'df332116-2ae3-4e51-99b0-108921470959' progress to 100 {{(pid=61852) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 980.607675] env[61852]: DEBUG nova.network.neutron [None req-c82444cf-0189-41a9-978c-cbb0b74adbbe tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Updating instance_info_cache with network_info: [{"id": "83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b", "address": "fa:16:3e:82:81:71", "network": {"id": "d8dfb48f-1d4c-40ca-a2c0-27b808516657", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-603860889-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fdd2d4aeb954b6fae049090b32f657b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d48f0ef6-34e5-44d4-8baf-4470ed96ce73", "external-id": "nsx-vlan-transportzone-316", "segmentation_id": 316, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83bdd4e5-89", "ovs_interfaceid": "83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 980.715369] env[61852]: DEBUG oslo_vmware.api [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293268, 'name': ReconfigVM_Task, 'duration_secs': 0.761924} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.715669] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Reconfigured VM instance instance-0000005e to attach disk [datastore2] 9f39bee8-52b8-426d-9b8a-114e3a6a6343/9f39bee8-52b8-426d-9b8a-114e3a6a6343.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 980.716393] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-feb43560-fe67-4478-9b69-cc9590a9a9a8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.723438] env[61852]: DEBUG oslo_vmware.api [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 980.723438] env[61852]: value = "task-1293269" [ 980.723438] env[61852]: _type = "Task" [ 980.723438] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.732160] env[61852]: DEBUG oslo_vmware.api [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293269, 'name': Rename_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.775485] env[61852]: DEBUG nova.network.neutron [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Updating instance_info_cache with network_info: [{"id": "41d20024-17d1-4e43-ad02-a6316dcc9c2f", "address": "fa:16:3e:96:00:1c", "network": {"id": "240e5d63-b796-4cef-9d1f-5d8f8868dea4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1472329620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdac3605118e44a69d44ab56cafe2e21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41d20024-17", "ovs_interfaceid": "41d20024-17d1-4e43-ad02-a6316dcc9c2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.111156] env[61852]: DEBUG oslo_concurrency.lockutils [None req-c82444cf-0189-41a9-978c-cbb0b74adbbe tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Releasing lock "refresh_cache-8d8679db-eb9d-45c1-b053-70378f58e273" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 981.112940] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92f2eefc-49ea-4752-80d5-78865d7ecb08 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.120023] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c82444cf-0189-41a9-978c-cbb0b74adbbe tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Resuming the VM {{(pid=61852) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 981.120263] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-56027593-5218-42c5-bda9-262585a8cb42 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.126695] env[61852]: DEBUG oslo_vmware.api [None req-c82444cf-0189-41a9-978c-cbb0b74adbbe tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){ [ 981.126695] env[61852]: value = "task-1293270" [ 981.126695] env[61852]: _type = "Task" [ 981.126695] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.134162] env[61852]: DEBUG oslo_vmware.api [None req-c82444cf-0189-41a9-978c-cbb0b74adbbe tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293270, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.232616] env[61852]: DEBUG oslo_vmware.api [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293269, 'name': Rename_Task, 'duration_secs': 0.154281} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.232898] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 981.233181] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bdb67b37-fcd5-41f2-9455-93d2d306097d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.240614] env[61852]: DEBUG oslo_vmware.api [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 981.240614] env[61852]: value = "task-1293271" [ 981.240614] env[61852]: _type = "Task" [ 981.240614] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.252790] env[61852]: DEBUG oslo_vmware.api [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293271, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.278626] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Releasing lock "refresh_cache-fb75509e-3cbf-406e-ad2d-aeb51a68295d" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 981.278908] env[61852]: DEBUG nova.objects.instance [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lazy-loading 'migration_context' on Instance uuid fb75509e-3cbf-406e-ad2d-aeb51a68295d {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 981.523926] env[61852]: DEBUG oslo_concurrency.lockutils [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Acquiring lock "d9715a56-249f-4c19-a55b-730d352248cb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.524470] env[61852]: DEBUG oslo_concurrency.lockutils [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Lock "d9715a56-249f-4c19-a55b-730d352248cb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.524470] env[61852]: DEBUG oslo_concurrency.lockutils [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Acquiring lock "d9715a56-249f-4c19-a55b-730d352248cb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.524650] env[61852]: DEBUG oslo_concurrency.lockutils [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Lock "d9715a56-249f-4c19-a55b-730d352248cb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.524778] env[61852]: DEBUG oslo_concurrency.lockutils [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Lock "d9715a56-249f-4c19-a55b-730d352248cb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.527241] env[61852]: INFO nova.compute.manager [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Terminating instance [ 981.529089] env[61852]: DEBUG nova.compute.manager [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 
tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 981.529284] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 981.530225] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91a2b3a3-18d6-4dca-abe0-3be765461d1c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.538784] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 981.539786] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f5ff7444-c524-4fdc-817a-5eef5460d0ff {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.545672] env[61852]: DEBUG oslo_vmware.api [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Waiting for the task: (returnval){ [ 981.545672] env[61852]: value = "task-1293272" [ 981.545672] env[61852]: _type = "Task" [ 981.545672] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.553332] env[61852]: DEBUG oslo_vmware.api [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Task: {'id': task-1293272, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.579245] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e151909c-1c93-421c-8b7c-5433c943eaa8 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "interface-d58958f2-7b6f-4480-9710-aa9e67ebd37c-825f3034-375c-417d-9d76-971f3239ff59" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.579624] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e151909c-1c93-421c-8b7c-5433c943eaa8 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "interface-d58958f2-7b6f-4480-9710-aa9e67ebd37c-825f3034-375c-417d-9d76-971f3239ff59" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.637880] env[61852]: DEBUG oslo_vmware.api [None req-c82444cf-0189-41a9-978c-cbb0b74adbbe tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293270, 'name': PowerOnVM_Task} progress is 93%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.751596] env[61852]: DEBUG oslo_vmware.api [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293271, 'name': PowerOnVM_Task, 'duration_secs': 0.502614} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.751916] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 981.752127] env[61852]: INFO nova.compute.manager [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Took 8.36 seconds to spawn the instance on the hypervisor. 
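The paired Acquiring lock / acquired :: waited / "released" :: held records throughout this section (lockutils.py:402, 407, 421) come from oslo.concurrency's synchronized wrapper, which times how long a caller waited for a named lock and how long it held it. A simplified re-creation of that bookkeeping, assuming a plain in-process threading.Lock (the real lockutils additionally supports external file locks and fair locks):

```python
import functools
import threading
import time

_locks: dict[str, threading.Lock] = {}

def synchronized(name):
    """Decorator mimicking lockutils' waited/held records (in-process only)."""
    lock = _locks.setdefault(name, threading.Lock())

    def decorator(fn):
        target = "%s.%s" % (fn.__module__, fn.__qualname__)

        @functools.wraps(fn)
        def inner(*args, **kwargs):
            print('Acquiring lock "%s" by "%s"' % (name, target))
            t0 = time.monotonic()
            lock.acquire()
            print('Lock "%s" acquired by "%s" :: waited %.3fs'
                  % (name, target, time.monotonic() - t0))
            t1 = time.monotonic()
            try:
                return fn(*args, **kwargs)
            finally:
                lock.release()
                print('Lock "%s" "released" by "%s" :: held %.3fs'
                      % (name, target, time.monotonic() - t1))
        return inner
    return decorator

@synchronized("refresh_cache-demo")
def refresh_cache():
    time.sleep(0.05)  # pretend to rebuild an instance's network info cache

refresh_cache()
```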
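The network_info payloads dumped by update_instance_cache_with_nw_info are lists of VIF models serialised to JSON for the instance info cache; each entry nests network, subnets, per-IP floating_ips and driver details exactly as shown. A small walker over that structure, using plain dicts for illustration (nova wraps these in nova.network.model classes, which behave like dicts), with a sample trimmed from the cache update for port 825f3034-... above:

```python
# Trimmed sample of one VIF entry from the logged instance_info_cache.
nw_info = [{
    "id": "825f3034-375c-417d-9d76-971f3239ff59",
    "address": "fa:16:3e:a6:a4:09",
    "devname": "tap825f3034-37",
    "network": {
        "id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.9", "type": "fixed",
                     "floating_ips": []}],
        }],
        "meta": {"mtu": 8950},
    },
}]

for vif in nw_info:
    net = vif["network"]
    print("%s (%s) on %s mtu=%s" % (vif["devname"], vif["address"],
                                    net["bridge"], net["meta"].get("mtu")))
    for subnet in net["subnets"]:
        for ip in subnet["ips"]:
            print("  %s %s in %s" % (ip["type"], ip["address"], subnet["cidr"]))
            for fip in ip.get("floating_ips", []):
                print("    floating %s" % fip["address"])
```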
[ 981.752311] env[61852]: DEBUG nova.compute.manager [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 981.753163] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f34cc76-1cc5-48ee-a7da-a9b30ee1d946 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.785150] env[61852]: DEBUG nova.objects.base [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=61852) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 981.788453] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-655ce2d7-a953-4e91-a452-34eafa91aed0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.814919] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-081a7b74-87c5-45fa-a021-c33ecb371978 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.823213] env[61852]: DEBUG oslo_vmware.api [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){ [ 981.823213] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]524d7237-ea66-cbe0-3e18-0aa28045d094" [ 981.823213] env[61852]: _type = "Task" [ 981.823213] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.835717] env[61852]: DEBUG oslo_vmware.api [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]524d7237-ea66-cbe0-3e18-0aa28045d094, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.055587] env[61852]: DEBUG oslo_vmware.api [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Task: {'id': task-1293272, 'name': PowerOffVM_Task, 'duration_secs': 0.313785} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.055893] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 982.056077] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 982.056478] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0d9d191c-7cca-41c1-ab76-82649d952ced {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.082847] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e151909c-1c93-421c-8b7c-5433c943eaa8 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "d58958f2-7b6f-4480-9710-aa9e67ebd37c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 982.083169] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e151909c-1c93-421c-8b7c-5433c943eaa8 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquired lock "d58958f2-7b6f-4480-9710-aa9e67ebd37c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.084066] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9638cc51-972f-4dff-b4e6-b8c4bdf59594 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.108867] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6edbf806-d830-4abe-b724-8898d04cc99a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.144314] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e151909c-1c93-421c-8b7c-5433c943eaa8 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Reconfiguring VM to detach interface {{(pid=61852) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 982.152682] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-92252776-05df-4518-b5bb-7c90f6ca9509 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.166326] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 982.166592] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 
tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Deleting contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 982.166845] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Deleting the datastore file [datastore2] d9715a56-249f-4c19-a55b-730d352248cb {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 982.167556] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5d99536a-8796-43c9-93b6-6088dc2c6b30 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.177607] env[61852]: DEBUG oslo_vmware.api [None req-c82444cf-0189-41a9-978c-cbb0b74adbbe tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293270, 'name': PowerOnVM_Task, 'duration_secs': 0.615819} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.179827] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-c82444cf-0189-41a9-978c-cbb0b74adbbe tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Resumed the VM {{(pid=61852) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 982.180083] env[61852]: DEBUG nova.compute.manager [None req-c82444cf-0189-41a9-978c-cbb0b74adbbe tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 982.180422] env[61852]: DEBUG oslo_vmware.api [None req-e151909c-1c93-421c-8b7c-5433c943eaa8 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 982.180422] env[61852]: value = "task-1293275" [ 982.180422] env[61852]: _type = "Task" [ 982.180422] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.180650] env[61852]: DEBUG oslo_vmware.api [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Waiting for the task: (returnval){ [ 982.180650] env[61852]: value = "task-1293274" [ 982.180650] env[61852]: _type = "Task" [ 982.180650] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.181351] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f046e9-a371-4561-8a11-93fe019583ba {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.200873] env[61852]: DEBUG oslo_vmware.api [None req-e151909c-1c93-421c-8b7c-5433c943eaa8 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293275, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.201367] env[61852]: DEBUG oslo_vmware.api [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Task: {'id': task-1293274, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.275630] env[61852]: INFO nova.compute.manager [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Took 14.83 seconds to build instance. [ 982.292818] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3a32cde1-e078-4857-b778-f576bd73ace5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "df332116-2ae3-4e51-99b0-108921470959" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 982.293128] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3a32cde1-e078-4857-b778-f576bd73ace5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "df332116-2ae3-4e51-99b0-108921470959" acquired by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.293455] env[61852]: DEBUG nova.compute.manager [None req-3a32cde1-e078-4857-b778-f576bd73ace5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Going to confirm migration 3 {{(pid=61852) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 982.334652] env[61852]: DEBUG oslo_vmware.api [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]524d7237-ea66-cbe0-3e18-0aa28045d094, 'name': SearchDatastore_Task, 'duration_secs': 0.01698} completed successfully.
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.334998] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 982.335580] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.697231] env[61852]: DEBUG oslo_vmware.api [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Task: {'id': task-1293274, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.295922} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.700659] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 982.700872] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Deleted contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 982.701066] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 982.701252] env[61852]: INFO nova.compute.manager [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Took 1.17 seconds to destroy the instance on the hypervisor. [ 982.701506] env[61852]: DEBUG oslo.service.loopingcall [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 982.701760] env[61852]: DEBUG oslo_vmware.api [None req-e151909c-1c93-421c-8b7c-5433c943eaa8 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293275, 'name': ReconfigVM_Task} progress is 14%.
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.701930] env[61852]: DEBUG nova.compute.manager [-] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 982.702070] env[61852]: DEBUG nova.network.neutron [-] [instance: d9715a56-249f-4c19-a55b-730d352248cb] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 982.778592] env[61852]: DEBUG oslo_concurrency.lockutils [None req-04f7c9da-cfda-4623-8dcd-074dc786dacc tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "9f39bee8-52b8-426d-9b8a-114e3a6a6343" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 16.339s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.912633] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3a32cde1-e078-4857-b778-f576bd73ace5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "refresh_cache-df332116-2ae3-4e51-99b0-108921470959" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 982.913055] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3a32cde1-e078-4857-b778-f576bd73ace5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired lock "refresh_cache-df332116-2ae3-4e51-99b0-108921470959" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.913376] env[61852]: DEBUG nova.network.neutron [None req-3a32cde1-e078-4857-b778-f576bd73ace5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 982.913879] env[61852]: DEBUG nova.objects.instance [None req-3a32cde1-e078-4857-b778-f576bd73ace5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lazy-loading 'info_cache' on Instance uuid df332116-2ae3-4e51-99b0-108921470959 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 982.996995] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aa826d5-7bd9-4e67-9836-4003c11c0bf5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.004742] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d7bc60f-c818-4e93-84b7-00aea8acadd2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.038093] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1031dc4b-2f9b-4d26-a582-692fcd8e6391 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.045875] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49ad0f08-1924-45ef-8193-f4c9c7f3263b {{(pid=61852) request_handler
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.059910] env[61852]: DEBUG nova.compute.provider_tree [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 983.087851] env[61852]: DEBUG nova.compute.manager [req-8c67656a-5718-44dc-b998-060a07ed8601 req-1755e1b2-6ee5-4b12-8c7f-8e89d92b68c6 service nova] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Received event network-vif-deleted-a83d344f-834c-41d9-bb8f-27ebb900a57e {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 983.088150] env[61852]: INFO nova.compute.manager [req-8c67656a-5718-44dc-b998-060a07ed8601 req-1755e1b2-6ee5-4b12-8c7f-8e89d92b68c6 service nova] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Neutron deleted interface a83d344f-834c-41d9-bb8f-27ebb900a57e; detaching it from the instance and deleting it from the info cache [ 983.088396] env[61852]: DEBUG nova.network.neutron [req-8c67656a-5718-44dc-b998-060a07ed8601 req-1755e1b2-6ee5-4b12-8c7f-8e89d92b68c6 service nova] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.194974] env[61852]: DEBUG oslo_vmware.api [None req-e151909c-1c93-421c-8b7c-5433c943eaa8 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293275, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.510571] env[61852]: DEBUG nova.network.neutron [-] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.566248] env[61852]: DEBUG nova.scheduler.client.report [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 983.569839] env[61852]: DEBUG oslo_concurrency.lockutils [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "9f39bee8-52b8-426d-9b8a-114e3a6a6343" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.570102] env[61852]: DEBUG oslo_concurrency.lockutils [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "9f39bee8-52b8-426d-9b8a-114e3a6a6343" acquired by
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.570314] env[61852]: DEBUG oslo_concurrency.lockutils [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "9f39bee8-52b8-426d-9b8a-114e3a6a6343-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.570502] env[61852]: DEBUG oslo_concurrency.lockutils [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "9f39bee8-52b8-426d-9b8a-114e3a6a6343-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.570671] env[61852]: DEBUG oslo_concurrency.lockutils [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "9f39bee8-52b8-426d-9b8a-114e3a6a6343-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.573065] env[61852]: INFO nova.compute.manager [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Terminating instance [ 983.574937] env[61852]: DEBUG nova.compute.manager [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 983.575143] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 983.576494] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96788aee-a1af-48fb-bb05-8e88326acea5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.584884] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 983.585167] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e058eb10-86df-4fce-95d6-638e5fa6412a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.590731] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-892f0bd7-b54e-49d0-a47b-9cf180084688 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.594391] env[61852]: DEBUG oslo_vmware.api [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 983.594391] env[61852]: value = "task-1293276" [ 983.594391] env[61852]: _type = "Task" [ 983.594391] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.602485] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ab2d49-3977-49a3-977a-f791790af96a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.618495] env[61852]: DEBUG oslo_vmware.api [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293276, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.635175] env[61852]: DEBUG nova.compute.manager [req-8c67656a-5718-44dc-b998-060a07ed8601 req-1755e1b2-6ee5-4b12-8c7f-8e89d92b68c6 service nova] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Detach interface failed, port_id=a83d344f-834c-41d9-bb8f-27ebb900a57e, reason: Instance d9715a56-249f-4c19-a55b-730d352248cb could not be found. {{(pid=61852) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 983.694538] env[61852]: DEBUG oslo_vmware.api [None req-e151909c-1c93-421c-8b7c-5433c943eaa8 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293275, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.013573] env[61852]: INFO nova.compute.manager [-] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Took 1.31 seconds to deallocate network for instance. [ 984.104796] env[61852]: DEBUG oslo_vmware.api [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293276, 'name': PowerOffVM_Task, 'duration_secs': 0.172367} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.107324] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 984.107532] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 984.108073] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b779ae67-11ae-4b92-aea5-8ec5a0cb5264 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.147367] env[61852]: DEBUG nova.network.neutron [None req-3a32cde1-e078-4857-b778-f576bd73ace5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Updating instance_info_cache with network_info: [{"id": "16ad03e7-b72d-4cdd-8da7-5314a7cad855", "address": "fa:16:3e:06:57:79", "network": {"id": "37c975fc-d484-4e07-82b4-dc10db3dab61", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2132613748-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14a017ea2b084ae0ad2994dda7809c7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16ad03e7-b7", "ovs_interfaceid": "16ad03e7-b72d-4cdd-8da7-5314a7cad855", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.194954] env[61852]: DEBUG oslo_vmware.api [None req-e151909c-1c93-421c-8b7c-5433c943eaa8 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293275, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.328839] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 984.329080] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Deleting contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 984.329267] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Deleting the datastore file [datastore2] 9f39bee8-52b8-426d-9b8a-114e3a6a6343 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 984.329544] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-61cca3f0-06fd-4427-b205-6d3db84dad95 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.335569] env[61852]: DEBUG oslo_vmware.api [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 984.335569] env[61852]: value = "task-1293278" [ 984.335569] env[61852]: _type = "Task" [ 984.335569] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.344444] env[61852]: DEBUG oslo_vmware.api [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293278, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.524314] env[61852]: DEBUG oslo_concurrency.lockutils [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.582147] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.247s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.585031] env[61852]: DEBUG oslo_concurrency.lockutils [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.061s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.585275] env[61852]: DEBUG nova.objects.instance [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Lazy-loading 'resources' on Instance uuid d9715a56-249f-4c19-a55b-730d352248cb {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 984.650224] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3a32cde1-e078-4857-b778-f576bd73ace5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Releasing lock "refresh_cache-df332116-2ae3-4e51-99b0-108921470959" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.650500] env[61852]: DEBUG nova.objects.instance [None req-3a32cde1-e078-4857-b778-f576bd73ace5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lazy-loading 'migration_context' on Instance uuid df332116-2ae3-4e51-99b0-108921470959 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 984.694578] env[61852]: DEBUG oslo_vmware.api [None req-e151909c-1c93-421c-8b7c-5433c943eaa8 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293275, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.846138] env[61852]: DEBUG oslo_vmware.api [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293278, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.130368} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.846470] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 984.846662] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Deleted contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 984.846897] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 984.847020] env[61852]: INFO nova.compute.manager [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Took 1.27 seconds to destroy the instance on the hypervisor. [ 984.847265] env[61852]: DEBUG oslo.service.loopingcall [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 984.847502] env[61852]: DEBUG nova.compute.manager [-] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 984.847606] env[61852]: DEBUG nova.network.neutron [-] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 985.119707] env[61852]: DEBUG nova.compute.manager [req-fbae6a9e-caaf-4a79-b912-e6546d9fd91b req-0ee137e9-2bb4-401a-ad49-e4d7faac6a1f service nova] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Received event network-vif-deleted-580ae921-fe6d-4dfb-8b65-4dd88fec14ed {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 985.119961] env[61852]: INFO nova.compute.manager [req-fbae6a9e-caaf-4a79-b912-e6546d9fd91b req-0ee137e9-2bb4-401a-ad49-e4d7faac6a1f service nova] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Neutron deleted interface 580ae921-fe6d-4dfb-8b65-4dd88fec14ed; detaching it from the instance and deleting it from the info cache [ 985.120226] env[61852]: DEBUG nova.network.neutron [req-fbae6a9e-caaf-4a79-b912-e6546d9fd91b req-0ee137e9-2bb4-401a-ad49-e4d7faac6a1f service nova] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 985.145140] env[61852]: INFO nova.scheduler.client.report [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Deleted allocation for migration a0ebf04b-b0d3-4993-a5ba-06cba9c38fe7
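The two "Neutron deleted interface ...; detaching it from the instance and deleting it from the info cache" events above (ports a83d344f-... and 580ae921-...) race against the instance teardowns that triggered them, which is why each is later resolved with a "Detach interface failed ... could not be found" record: the event handler treats a vanished instance as benign rather than as an error. A hedged sketch of that shape, using illustrative names (handle_network_vif_deleted, compute_api) rather than the real compute-manager entry points:

import logging

LOG = logging.getLogger(__name__)

class InstanceNotFound(Exception):
    """Local stand-in for nova.exception.InstanceNotFound."""

def handle_network_vif_deleted(compute_api, instance_uuid, port_id):
    # Neutron already deleted the port; try to detach it from the VM and
    # drop it from the info cache. If the instance was destroyed
    # concurrently, log the failure and move on instead of raising.
    try:
        compute_api.detach_interface(instance_uuid, port_id)
    except InstanceNotFound:
        LOG.debug("Detach interface failed, port_id=%s, reason: "
                  "Instance %s could not be found.", port_id, instance_uuid)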
[ 985.154770] env[61852]: DEBUG nova.objects.base [None req-3a32cde1-e078-4857-b778-f576bd73ace5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=61852) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 985.156018] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a1d6176-ea35-40d6-95b5-d792edb7d6a0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.179721] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-abd4096c-a194-4f3e-bbfb-3d0225f4dec3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.185188] env[61852]: DEBUG oslo_vmware.api [None req-3a32cde1-e078-4857-b778-f576bd73ace5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 985.185188] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52859985-0533-81b0-6dd3-ea5fa080e15e" [ 985.185188] env[61852]: _type = "Task" [ 985.185188] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.197929] env[61852]: DEBUG oslo_vmware.api [None req-3a32cde1-e078-4857-b778-f576bd73ace5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52859985-0533-81b0-6dd3-ea5fa080e15e, 'name': SearchDatastore_Task, 'duration_secs': 0.007101} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.198213] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3a32cde1-e078-4857-b778-f576bd73ace5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.202252] env[61852]: DEBUG oslo_vmware.api [None req-e151909c-1c93-421c-8b7c-5433c943eaa8 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293275, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.223534] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c5911eb-e3dc-4caa-aa09-cb2fa0081bfb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.234656] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ee70fbe-cf96-41fd-9e99-59ad09fdeefe {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.264749] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e3996e7-c12c-43ee-a1b3-695b185ecfdb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.272881] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6964f27e-2ba6-4cbc-8ba6-221c61cef1e3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.285739] env[61852]: DEBUG nova.compute.provider_tree [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 985.567389] env[61852]: DEBUG nova.network.neutron [-] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 985.622217] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-39cc236b-f9e0-442b-8742-974b8c58355d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.634355] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-645c38e4-45b6-42a0-bc61-d2e98b425ea7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.653633] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "fb75509e-3cbf-406e-ad2d-aeb51a68295d" "released" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: held 6.696s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.670491] env[61852]: DEBUG nova.compute.manager [req-fbae6a9e-caaf-4a79-b912-e6546d9fd91b req-0ee137e9-2bb4-401a-ad49-e4d7faac6a1f service nova] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Detach interface failed, port_id=580ae921-fe6d-4dfb-8b65-4dd88fec14ed, reason: Instance 9f39bee8-52b8-426d-9b8a-114e3a6a6343 could not be found.
{{(pid=61852) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 985.698538] env[61852]: DEBUG oslo_vmware.api [None req-e151909c-1c93-421c-8b7c-5433c943eaa8 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293275, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.806724] env[61852]: ERROR nova.scheduler.client.report [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] [req-45b20fd4-eb05-4ea2-adb8-528ad5cc7f31] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f818062c-7b17-4bd0-94af-192a674543c3. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-45b20fd4-eb05-4ea2-adb8-528ad5cc7f31"}]} [ 985.830015] env[61852]: DEBUG nova.scheduler.client.report [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Refreshing inventories for resource provider f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 985.845748] env[61852]: DEBUG nova.scheduler.client.report [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Updating ProviderTree inventory for provider f818062c-7b17-4bd0-94af-192a674543c3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 985.845748] env[61852]: DEBUG nova.compute.provider_tree [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 985.866786] env[61852]: DEBUG nova.scheduler.client.report [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] 
Refreshing aggregate associations for resource provider f818062c-7b17-4bd0-94af-192a674543c3, aggregates: None {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 985.894641] env[61852]: DEBUG nova.scheduler.client.report [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Refreshing trait associations for resource provider f818062c-7b17-4bd0-94af-192a674543c3, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 986.007845] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-435044dd-c048-4014-94c3-935553dc6c5f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.015208] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58f6054a-d518-43c6-adbf-4692cf49e3f1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.044009] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e80c9bf-abff-46a6-8906-e1ca89247f9d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.050704] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d4ad82e-818a-48fc-a21f-b1f184088b96 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.063325] env[61852]: DEBUG nova.compute.provider_tree [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 986.070238] env[61852]: INFO nova.compute.manager [-] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Took 1.22 seconds to deallocate network for instance. [ 986.200540] env[61852]: DEBUG oslo_vmware.api [None req-e151909c-1c93-421c-8b7c-5433c943eaa8 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293275, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.577033] env[61852]: DEBUG oslo_concurrency.lockutils [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 986.592985] env[61852]: DEBUG nova.scheduler.client.report [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Updated inventory for provider f818062c-7b17-4bd0-94af-192a674543c3 with generation 122 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 986.593274] env[61852]: DEBUG nova.compute.provider_tree [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Updating resource provider f818062c-7b17-4bd0-94af-192a674543c3 generation from 122 to 123 during operation: update_inventory {{(pid=61852) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 986.593456] env[61852]: DEBUG nova.compute.provider_tree [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 986.700952] env[61852]: DEBUG oslo_vmware.api [None req-e151909c-1c93-421c-8b7c-5433c943eaa8 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293275, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.980467] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "fb75509e-3cbf-406e-ad2d-aeb51a68295d" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 986.980779] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "fb75509e-3cbf-406e-ad2d-aeb51a68295d" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 986.981011] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "fb75509e-3cbf-406e-ad2d-aeb51a68295d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 986.981223] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "fb75509e-3cbf-406e-ad2d-aeb51a68295d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 986.981425] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "fb75509e-3cbf-406e-ad2d-aeb51a68295d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.983704] env[61852]: INFO nova.compute.manager [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Terminating instance [ 986.988273] env[61852]: DEBUG nova.compute.manager [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Start destroying the instance on the hypervisor.
[ 986.988470] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 986.989892] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d27a7bfa-d383-44ad-b69f-58235262a750 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 986.998060] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 986.998656] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cbd089a7-4925-49f6-89b4-3b011be721cc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 987.004729] env[61852]: DEBUG oslo_vmware.api [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){
[ 987.004729] env[61852]: value = "task-1293279"
[ 987.004729] env[61852]: _type = "Task"
[ 987.004729] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 987.013385] env[61852]: DEBUG oslo_vmware.api [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293279, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 987.098831] env[61852]: DEBUG oslo_concurrency.lockutils [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.514s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 987.102108] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3a32cde1-e078-4857-b778-f576bd73ace5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 1.903s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 987.124540] env[61852]: INFO nova.scheduler.client.report [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Deleted allocations for instance d9715a56-249f-4c19-a55b-730d352248cb
[ 987.200244] env[61852]: DEBUG oslo_vmware.api [None req-e151909c-1c93-421c-8b7c-5433c943eaa8 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293275, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
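The wait_for_task/_poll_task pairs above are a blocking poll: the driver submits a VIM task (here PowerOffVM_Task), then repeatedly reads the task's state, logging "progress is N%" on each pass until it succeeds or errors. A schematic version of that loop; get_task_info stands in for the PropertyCollector read oslo.vmware performs, and the 0.5s interval is illustrative:

```python
import time

def wait_for_task(task_ref, get_task_info, interval: float = 0.5):
    # Poll a VIM task until it leaves the running/queued states.
    while True:
        info = get_task_info(task_ref)
        if info.state == "success":
            return info.result
        if info.state == "error":
            raise RuntimeError(info.error)
        # Matches the "Task: {...} progress is N%" lines emitted while polling.
        print(f"Task {task_ref} progress is {info.progress or 0}%")
        time.sleep(interval)
```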
[ 987.493979] env[61852]: DEBUG oslo_concurrency.lockutils [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquiring lock "8d8679db-eb9d-45c1-b053-70378f58e273" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 987.494403] env[61852]: DEBUG oslo_concurrency.lockutils [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lock "8d8679db-eb9d-45c1-b053-70378f58e273" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 987.494538] env[61852]: DEBUG oslo_concurrency.lockutils [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquiring lock "8d8679db-eb9d-45c1-b053-70378f58e273-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 987.494820] env[61852]: DEBUG oslo_concurrency.lockutils [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lock "8d8679db-eb9d-45c1-b053-70378f58e273-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 987.495022] env[61852]: DEBUG oslo_concurrency.lockutils [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lock "8d8679db-eb9d-45c1-b053-70378f58e273-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 987.497261] env[61852]: INFO nova.compute.manager [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Terminating instance
[ 987.499041] env[61852]: DEBUG nova.compute.manager [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 987.499277] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 987.500135] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ecc0148-a719-40ed-ac0c-eed25a032bb8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 987.510450] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 987.510698] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-17ba838c-31a0-4d79-b0ba-46141e5d560d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 987.516763] env[61852]: DEBUG oslo_vmware.api [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293279, 'name': PowerOffVM_Task, 'duration_secs': 0.182436} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 987.517949] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 987.518142] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 987.518432] env[61852]: DEBUG oslo_vmware.api [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){
[ 987.518432] env[61852]: value = "task-1293280"
[ 987.518432] env[61852]: _type = "Task"
[ 987.518432] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 987.518697] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1d8e2698-72eb-4c9f-984d-3924489da7d9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 987.528624] env[61852]: DEBUG oslo_vmware.api [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293280, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 987.591883] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 987.592166] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 987.592403] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Deleting the datastore file [datastore1] fb75509e-3cbf-406e-ad2d-aeb51a68295d {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 987.592715] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ac3af801-586d-4b24-84e7-3a313b9e4b5b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 987.600064] env[61852]: DEBUG oslo_vmware.api [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for the task: (returnval){
[ 987.600064] env[61852]: value = "task-1293282"
[ 987.600064] env[61852]: _type = "Task"
[ 987.600064] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 987.612432] env[61852]: DEBUG oslo_vmware.api [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293282, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 987.637421] env[61852]: DEBUG oslo_concurrency.lockutils [None req-637958b3-f482-418b-b5d6-aef3eb5bf1a4 tempest-ImagesNegativeTestJSON-1409077892 tempest-ImagesNegativeTestJSON-1409077892-project-member] Lock "d9715a56-249f-4c19-a55b-730d352248cb" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.113s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 987.700603] env[61852]: DEBUG oslo_vmware.api [None req-e151909c-1c93-421c-8b7c-5433c943eaa8 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293275, 'name': ReconfigVM_Task} progress is 18%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
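The hypervisor-side teardown traced above is a fixed VIM sequence: PowerOffVM_Task, then UnregisterVM (a plain method, not a task, which is why no wait is logged for it), then DeleteDatastoreFile_Task on the instance's "[datastore1] <uuid>" directory. A condensed sketch in pyVmomi style, not Nova's actual driver code; the si/vm objects and wait_for_task helper are assumed:

```python
# Sketch of the power-off -> unregister -> delete-files sequence above.
# `si` is a pyVmomi ServiceInstance, `vm` a VirtualMachine managed object,
# and wait_for_task stands in for oslo.vmware's session.wait_for_task.
def destroy_instance(si, vm, datastore_path, datacenter, wait_for_task):
    wait_for_task(vm.PowerOffVM_Task())   # PowerOffVM_Task in the log
    vm.UnregisterVM()                     # UnregisterVM: immediate, no task
    fm = si.RetrieveContent().fileManager
    # DeleteDatastoreFile_Task removes e.g. "[datastore1] fb75509e-..."
    wait_for_task(fm.DeleteDatastoreFile_Task(name=datastore_path,
                                              datacenter=datacenter))
```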
[ 987.712553] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b2b6e3a-c3f8-4a0d-b47c-03c28d450190 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 987.720089] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b4eb661-8e8b-461c-a83a-75d29af521f6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 987.750977] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f6b16a6-35a4-4c6e-b13a-a145c9d1a67c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 987.758310] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ac3b9ad-ebaf-4418-966b-2633f2506e2b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 987.771888] env[61852]: DEBUG nova.compute.provider_tree [None req-3a32cde1-e078-4857-b778-f576bd73ace5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 988.029820] env[61852]: DEBUG oslo_vmware.api [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293280, 'name': PowerOffVM_Task, 'duration_secs': 0.179086} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 988.030109] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 988.030287] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 988.030552] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-02d71ebd-66c3-4c6a-b3fc-7d67485b7c13 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 988.087300] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 988.087544] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 988.087861] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Deleting the datastore file [datastore1] 8d8679db-eb9d-45c1-b053-70378f58e273 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 988.088214] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-529fb7a4-caa6-4f1d-987f-432b4b6856a1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 988.094178] env[61852]: DEBUG oslo_vmware.api [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for the task: (returnval){
[ 988.094178] env[61852]: value = "task-1293284"
[ 988.094178] env[61852]: _type = "Task"
[ 988.094178] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 988.101870] env[61852]: DEBUG oslo_vmware.api [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293284, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 988.108998] env[61852]: DEBUG oslo_vmware.api [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Task: {'id': task-1293282, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162788} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 988.109248] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 988.109484] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 988.109725] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 988.109956] env[61852]: INFO nova.compute.manager [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Took 1.12 seconds to destroy the instance on the hypervisor.
[ 988.110223] env[61852]: DEBUG oslo.service.loopingcall [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 988.110423] env[61852]: DEBUG nova.compute.manager [-] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 988.110521] env[61852]: DEBUG nova.network.neutron [-] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 988.202929] env[61852]: DEBUG oslo_vmware.api [None req-e151909c-1c93-421c-8b7c-5433c943eaa8 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293275, 'name': ReconfigVM_Task, 'duration_secs': 5.741312} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
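The "Waiting for function ... _deallocate_network_with_retries to return" line comes from oslo.service's retry machinery: the Neutron deallocation is wrapped so transient failures are retried with backoff rather than failing the whole delete. A sketch of that shape; the retry counts and sleep times are illustrative, not Nova's exact settings:

```python
from oslo_service import loopingcall

# Sketch of the retry wrapper behind the loopingcall line above.
@loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                            max_sleep_time=10, exceptions=(Exception,))
def _deallocate_network_with_retries():
    pass  # placeholder: network_api.deallocate_for_instance(context, instance)

# Blocks until the wrapped call succeeds or the retry budget is exhausted.
_deallocate_network_with_retries()
```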
[ 988.203155] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e151909c-1c93-421c-8b7c-5433c943eaa8 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Releasing lock "d58958f2-7b6f-4480-9710-aa9e67ebd37c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 988.203426] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e151909c-1c93-421c-8b7c-5433c943eaa8 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Reconfigured VM to detach interface {{(pid=61852) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}}
[ 988.275731] env[61852]: DEBUG nova.scheduler.client.report [None req-3a32cde1-e078-4857-b778-f576bd73ace5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 988.462026] env[61852]: DEBUG nova.compute.manager [req-6f3169d0-0ff3-4dee-b5e7-dae06414b345 req-f7dda164-e39f-4d88-bee8-cfcc931ac094 service nova] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Received event network-vif-deleted-41d20024-17d1-4e43-ad02-a6316dcc9c2f {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 988.462328] env[61852]: INFO nova.compute.manager [req-6f3169d0-0ff3-4dee-b5e7-dae06414b345 req-f7dda164-e39f-4d88-bee8-cfcc931ac094 service nova] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Neutron deleted interface 41d20024-17d1-4e43-ad02-a6316dcc9c2f; detaching it from the instance and deleting it from the info cache
[ 988.462434] env[61852]: DEBUG nova.network.neutron [req-6f3169d0-0ff3-4dee-b5e7-dae06414b345 req-f7dda164-e39f-4d88-bee8-cfcc931ac094 service nova] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 988.603903] env[61852]: DEBUG oslo_vmware.api [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Task: {'id': task-1293284, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15196} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 988.605164] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 988.605164] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 988.605164] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 988.605368] env[61852]: INFO nova.compute.manager [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Took 1.11 seconds to destroy the instance on the hypervisor.
[ 988.606027] env[61852]: DEBUG oslo.service.loopingcall [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 988.606027] env[61852]: DEBUG nova.compute.manager [-] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 988.606185] env[61852]: DEBUG nova.network.neutron [-] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 988.941273] env[61852]: DEBUG nova.network.neutron [-] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 988.964296] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2f308c1a-6a0b-4ee1-89ba-82e9ce40bab0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 988.974370] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e12078a3-5988-4baa-a435-afcdd3008361 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 988.999248] env[61852]: DEBUG nova.compute.manager [req-6f3169d0-0ff3-4dee-b5e7-dae06414b345 req-f7dda164-e39f-4d88-bee8-cfcc931ac094 service nova] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Detach interface failed, port_id=41d20024-17d1-4e43-ad02-a6316dcc9c2f, reason: Instance fb75509e-3cbf-406e-ad2d-aeb51a68295d could not be found. {{(pid=61852) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}}
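The network-vif-deleted lines show the external-event path: Neutron notifies Nova with an event named "network-vif-deleted" tagged with the port UUID, and the compute manager routes it to a handler. Because this event raced with the instance's own deletion, the handler just logs "Detach interface failed ... could not be found" instead of raising. A schematic sketch of that split-and-dispatch; the handler table and prints are ours, not Nova's exact code:

```python
PREFIX = "network-vif-deleted-"

def handle_event(event: str, instance_exists) -> None:
    # Events arrive as "<event-name>-<port_id>"; everything after the
    # known event name is the port UUID tag.
    assert event.startswith(PREFIX)
    port_id = event[len(PREFIX):]
    if not instance_exists():
        # Mirrors "Detach interface failed ... could not be found":
        # a vif-deleted event racing with instance teardown is benign.
        print(f"Detach interface failed, port_id={port_id}: instance gone")
        return
    print(f"Detaching port {port_id} and purging it from the info cache")
```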
[ 989.284822] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3a32cde1-e078-4857-b778-f576bd73ace5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.184s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 989.289407] env[61852]: DEBUG oslo_concurrency.lockutils [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.713s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 989.289642] env[61852]: DEBUG nova.objects.instance [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lazy-loading 'resources' on Instance uuid 9f39bee8-52b8-426d-9b8a-114e3a6a6343 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 989.327121] env[61852]: DEBUG nova.network.neutron [-] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 989.443537] env[61852]: INFO nova.compute.manager [-] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Took 1.33 seconds to deallocate network for instance.
[ 989.582551] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e151909c-1c93-421c-8b7c-5433c943eaa8 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "refresh_cache-d58958f2-7b6f-4480-9710-aa9e67ebd37c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 989.582745] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e151909c-1c93-421c-8b7c-5433c943eaa8 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquired lock "refresh_cache-d58958f2-7b6f-4480-9710-aa9e67ebd37c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 989.582928] env[61852]: DEBUG nova.network.neutron [None req-e151909c-1c93-421c-8b7c-5433c943eaa8 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 989.830377] env[61852]: INFO nova.compute.manager [-] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Took 1.22 seconds to deallocate network for instance.
[ 989.850260] env[61852]: INFO nova.scheduler.client.report [None req-3a32cde1-e078-4857-b778-f576bd73ace5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Deleted allocation for migration ccb20a30-bdc7-49d6-960d-caf0ea2fe3b6
[ 989.897149] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79e6eb26-a291-42b8-b169-ed6a93864779 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 989.904422] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cc913d8-c07e-494e-aa24-d78cdc66db85 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 989.933899] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8185152d-6451-420d-bfbe-f1b1b297c74a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 989.941222] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8ffa41c-27d9-48d3-8709-b124fce1b23c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 989.955706] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 989.956193] env[61852]: DEBUG nova.compute.provider_tree [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 990.061640] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "d58958f2-7b6f-4480-9710-aa9e67ebd37c" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 990.061640] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "d58958f2-7b6f-4480-9710-aa9e67ebd37c" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 990.061809] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "d58958f2-7b6f-4480-9710-aa9e67ebd37c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 990.061862] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "d58958f2-7b6f-4480-9710-aa9e67ebd37c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 990.062073] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "d58958f2-7b6f-4480-9710-aa9e67ebd37c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 990.064147] env[61852]: INFO nova.compute.manager [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Terminating instance
[ 990.066554] env[61852]: DEBUG nova.compute.manager [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 990.066669] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 990.067525] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d78f63c8-1d09-4e30-9ef9-eee6f59d7661 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 990.076634] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 990.076865] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-676ea210-65bd-419a-978f-255e2319da8a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 990.082451] env[61852]: DEBUG oslo_vmware.api [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){
[ 990.082451] env[61852]: value = "task-1293285"
[ 990.082451] env[61852]: _type = "Task"
[ 990.082451] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 990.091294] env[61852]: DEBUG oslo_vmware.api [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293285, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
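The "Deleted allocation for migration ccb20a30-..." and "Deleted allocations for instance ..." lines are Placement cleanup: allocations are keyed by a consumer UUID, which is the instance during its lifetime and the migration UUID while a resize is in flight, and confirming the resize or deleting the instance removes that consumer's allocations. The REST shape is a single DELETE; the endpoint, token handling, and microversion below are illustrative assumptions, since Nova goes through its authenticated report client rather than raw requests:

```python
import requests

PLACEMENT = "http://placement.example/placement"  # hypothetical endpoint

def delete_allocations(consumer_uuid: str, token: str) -> None:
    # DELETE /allocations/{consumer_uuid} drops every resource the
    # consumer holds, across all providers, in one call.
    resp = requests.delete(
        f"{PLACEMENT}/allocations/{consumer_uuid}",
        headers={"X-Auth-Token": token,
                 "OpenStack-API-Version": "placement 1.28"},
    )
    resp.raise_for_status()  # Placement answers 204 on success
```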
[ 990.324152] env[61852]: INFO nova.network.neutron [None req-e151909c-1c93-421c-8b7c-5433c943eaa8 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Port 825f3034-375c-417d-9d76-971f3239ff59 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache.
[ 990.324588] env[61852]: DEBUG nova.network.neutron [None req-e151909c-1c93-421c-8b7c-5433c943eaa8 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Updating instance_info_cache with network_info: [{"id": "fb4d01a4-4b0f-4591-aaf9-f8487c4cd460", "address": "fa:16:3e:fe:46:c7", "network": {"id": "d984a6fb-5f5f-4678-bc8a-3723c26f290a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-206988452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e93a6965a6884292bc56b01f7d54a622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb4d01a4-4b", "ovs_interfaceid": "fb4d01a4-4b0f-4591-aaf9-f8487c4cd460", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 990.339256] env[61852]: DEBUG oslo_concurrency.lockutils [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 990.356111] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3a32cde1-e078-4857-b778-f576bd73ace5 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "df332116-2ae3-4e51-99b0-108921470959" "released" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: held 8.063s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 990.462045] env[61852]: DEBUG nova.scheduler.client.report [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 990.490427] env[61852]: DEBUG nova.compute.manager [req-2db34b91-db80-4954-911e-8c4ed1e6475b req-843ef2f8-3826-4a10-aa22-160726f744f2 service nova] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Received event network-vif-deleted-83bdd4e5-8915-43d0-a8aa-fbf45ae4f40b {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 990.592865] env[61852]: DEBUG oslo_vmware.api [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293285, 'name': PowerOffVM_Task, 'duration_secs': 0.188587} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 990.593269] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 990.593405] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 990.593645] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b3df6f30-6c1e-4c0e-9337-d781198ad0e4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 990.660523] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 990.660760] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Deleting contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 990.660947] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Deleting the datastore file [datastore2] d58958f2-7b6f-4480-9710-aa9e67ebd37c {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 990.661317] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e51400c8-e85f-4880-a587-98de4cef5fa9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 990.667377] env[61852]: DEBUG oslo_vmware.api [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){
[ 990.667377] env[61852]: value = "task-1293287"
[ 990.667377] env[61852]: _type = "Task"
[ 990.667377] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 990.675076] env[61852]: DEBUG oslo_vmware.api [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293287, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 990.828154] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e151909c-1c93-421c-8b7c-5433c943eaa8 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Releasing lock "refresh_cache-d58958f2-7b6f-4480-9710-aa9e67ebd37c" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 990.969175] env[61852]: DEBUG oslo_concurrency.lockutils [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.680s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 990.971600] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.016s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 990.971803] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 990.974034] env[61852]: DEBUG oslo_concurrency.lockutils [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.635s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 990.974289] env[61852]: DEBUG nova.objects.instance [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lazy-loading 'resources' on Instance uuid 8d8679db-eb9d-45c1-b053-70378f58e273 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 990.985370] env[61852]: INFO nova.scheduler.client.report [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Deleted allocations for instance 9f39bee8-52b8-426d-9b8a-114e3a6a6343
[ 990.995707] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe360d91-280c-4134-a132-f1dca21559bb tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "df332116-2ae3-4e51-99b0-108921470959" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 990.995962] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe360d91-280c-4134-a132-f1dca21559bb tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "df332116-2ae3-4e51-99b0-108921470959" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 990.996190] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe360d91-280c-4134-a132-f1dca21559bb tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "df332116-2ae3-4e51-99b0-108921470959-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 990.996376] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe360d91-280c-4134-a132-f1dca21559bb tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "df332116-2ae3-4e51-99b0-108921470959-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 990.996734] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe360d91-280c-4134-a132-f1dca21559bb tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "df332116-2ae3-4e51-99b0-108921470959-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 990.998934] env[61852]: INFO nova.scheduler.client.report [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Deleted allocations for instance fb75509e-3cbf-406e-ad2d-aeb51a68295d
[ 991.000167] env[61852]: INFO nova.compute.manager [None req-fe360d91-280c-4134-a132-f1dca21559bb tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Terminating instance
[ 991.005609] env[61852]: DEBUG nova.compute.manager [None req-fe360d91-280c-4134-a132-f1dca21559bb tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 991.005849] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fe360d91-280c-4134-a132-f1dca21559bb tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 991.007994] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce9714a2-935f-4946-acf3-b95a175eff70 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 991.016383] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe360d91-280c-4134-a132-f1dca21559bb tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 991.016704] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-074172b1-fd71-4063-bb7a-22efede23a5e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 991.023329] env[61852]: DEBUG oslo_vmware.api [None req-fe360d91-280c-4134-a132-f1dca21559bb tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){
[ 991.023329] env[61852]: value = "task-1293288"
[ 991.023329] env[61852]: _type = "Task"
[ 991.023329] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 991.031339] env[61852]: DEBUG oslo_vmware.api [None req-fe360d91-280c-4134-a132-f1dca21559bb tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293288, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 991.177408] env[61852]: DEBUG oslo_vmware.api [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293287, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148396} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 991.177733] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 991.177915] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Deleted contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 991.178109] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 991.178290] env[61852]: INFO nova.compute.manager [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Took 1.11 seconds to destroy the instance on the hypervisor.
[ 991.178532] env[61852]: DEBUG oslo.service.loopingcall [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 991.178725] env[61852]: DEBUG nova.compute.manager [-] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 991.178822] env[61852]: DEBUG nova.network.neutron [-] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 991.332923] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e151909c-1c93-421c-8b7c-5433c943eaa8 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "interface-d58958f2-7b6f-4480-9710-aa9e67ebd37c-825f3034-375c-417d-9d76-971f3239ff59" "released" by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" :: held 9.753s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 991.485316] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "76fa1b27-bd1f-4794-a56b-88373e79db9a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 991.485586] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "76fa1b27-bd1f-4794-a56b-88373e79db9a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 991.494728] env[61852]: DEBUG oslo_concurrency.lockutils [None req-47ce1498-93fd-43f8-8dfb-dd9508192438 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "9f39bee8-52b8-426d-9b8a-114e3a6a6343" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 7.925s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 991.514430] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b81084dd-d60d-4ad8-a193-965a1180f29f tempest-DeleteServersTestJSON-947876575 tempest-DeleteServersTestJSON-947876575-project-member] Lock "fb75509e-3cbf-406e-ad2d-aeb51a68295d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 4.533s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 991.537799] env[61852]: DEBUG oslo_vmware.api [None req-fe360d91-280c-4134-a132-f1dca21559bb tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293288, 'name': PowerOffVM_Task, 'duration_secs': 0.273795} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.537799] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe360d91-280c-4134-a132-f1dca21559bb tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 991.537967] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fe360d91-280c-4134-a132-f1dca21559bb tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 991.538264] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2ae3e0c8-cf0f-4ecd-b68a-ac26ec1392db {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.566416] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff4f3bec-c1ab-4b3d-bddb-233edec1d460 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.574940] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c31957d1-7219-4970-bb12-01802c7e18d3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.604207] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbcea029-999a-4cff-98ba-6ef0648af1e5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.611797] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eca346d-6187-46e2-9c09-d0e9cf087a71 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.626200] env[61852]: DEBUG nova.compute.provider_tree [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 991.628334] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fe360d91-280c-4134-a132-f1dca21559bb tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 991.628529] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fe360d91-280c-4134-a132-f1dca21559bb tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 991.628708] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe360d91-280c-4134-a132-f1dca21559bb tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Deleting the datastore 
file [datastore1] df332116-2ae3-4e51-99b0-108921470959 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 991.628950] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-726a4531-773f-432b-9666-2c738a6c2b2e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.635367] env[61852]: DEBUG oslo_vmware.api [None req-fe360d91-280c-4134-a132-f1dca21559bb tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 991.635367] env[61852]: value = "task-1293290" [ 991.635367] env[61852]: _type = "Task" [ 991.635367] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.646133] env[61852]: DEBUG oslo_vmware.api [None req-fe360d91-280c-4134-a132-f1dca21559bb tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293290, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.989307] env[61852]: DEBUG nova.compute.manager [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 992.133232] env[61852]: DEBUG nova.scheduler.client.report [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 992.146656] env[61852]: DEBUG oslo_vmware.api [None req-fe360d91-280c-4134-a132-f1dca21559bb tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293290, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.211653} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.147451] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe360d91-280c-4134-a132-f1dca21559bb tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 992.147451] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fe360d91-280c-4134-a132-f1dca21559bb tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 992.147581] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fe360d91-280c-4134-a132-f1dca21559bb tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 992.148162] env[61852]: INFO nova.compute.manager [None req-fe360d91-280c-4134-a132-f1dca21559bb tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: df332116-2ae3-4e51-99b0-108921470959] Took 1.14 seconds to destroy the instance on the hypervisor. [ 992.148443] env[61852]: DEBUG oslo.service.loopingcall [None req-fe360d91-280c-4134-a132-f1dca21559bb tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 992.148922] env[61852]: DEBUG nova.compute.manager [-] [instance: df332116-2ae3-4e51-99b0-108921470959] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 992.149074] env[61852]: DEBUG nova.network.neutron [-] [instance: df332116-2ae3-4e51-99b0-108921470959] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 992.257099] env[61852]: DEBUG oslo_concurrency.lockutils [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Acquiring lock "3ae6fdae-3246-4607-b15d-c320c4dc816b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.257099] env[61852]: DEBUG oslo_concurrency.lockutils [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Lock "3ae6fdae-3246-4607-b15d-c320c4dc816b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.408906] env[61852]: DEBUG nova.network.neutron [-] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.458426] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Acquiring lock "33667154-991d-4a32-8f16-f292a4725e3e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.458670] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Lock "33667154-991d-4a32-8f16-f292a4725e3e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.509790] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.525125] env[61852]: DEBUG nova.compute.manager [req-27439627-f7a9-4b19-addc-54e7619692ff req-9b469e3c-036c-4a4b-8a7e-0498fc103984 service nova] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Received event network-vif-deleted-fb4d01a4-4b0f-4591-aaf9-f8487c4cd460 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 992.525125] env[61852]: DEBUG nova.compute.manager [req-27439627-f7a9-4b19-addc-54e7619692ff req-9b469e3c-036c-4a4b-8a7e-0498fc103984 
service nova] [instance: df332116-2ae3-4e51-99b0-108921470959] Received event network-vif-deleted-16ad03e7-b72d-4cdd-8da7-5314a7cad855 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 992.525332] env[61852]: INFO nova.compute.manager [req-27439627-f7a9-4b19-addc-54e7619692ff req-9b469e3c-036c-4a4b-8a7e-0498fc103984 service nova] [instance: df332116-2ae3-4e51-99b0-108921470959] Neutron deleted interface 16ad03e7-b72d-4cdd-8da7-5314a7cad855; detaching it from the instance and deleting it from the info cache [ 992.525466] env[61852]: DEBUG nova.network.neutron [req-27439627-f7a9-4b19-addc-54e7619692ff req-9b469e3c-036c-4a4b-8a7e-0498fc103984 service nova] [instance: df332116-2ae3-4e51-99b0-108921470959] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.638136] env[61852]: DEBUG oslo_concurrency.lockutils [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.664s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.640806] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.131s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.642425] env[61852]: INFO nova.compute.claims [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 992.664109] env[61852]: INFO nova.scheduler.client.report [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Deleted allocations for instance 8d8679db-eb9d-45c1-b053-70378f58e273 [ 992.758719] env[61852]: DEBUG nova.compute.manager [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 992.900290] env[61852]: DEBUG nova.network.neutron [-] [instance: df332116-2ae3-4e51-99b0-108921470959] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.903191] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "8d6dc967-ebe5-4573-b41a-5793f96b7eec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.903438] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "8d6dc967-ebe5-4573-b41a-5793f96b7eec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.913446] env[61852]: INFO nova.compute.manager [-] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Took 1.73 seconds to deallocate network for instance. [ 992.960591] env[61852]: DEBUG nova.compute.manager [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 993.027768] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4c33e9ce-6bf0-49b6-9930-43bfd0b96488 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.038111] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b960d65-b215-4d40-aa81-a5000ce4186f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.061585] env[61852]: DEBUG nova.compute.manager [req-27439627-f7a9-4b19-addc-54e7619692ff req-9b469e3c-036c-4a4b-8a7e-0498fc103984 service nova] [instance: df332116-2ae3-4e51-99b0-108921470959] Detach interface failed, port_id=16ad03e7-b72d-4cdd-8da7-5314a7cad855, reason: Instance df332116-2ae3-4e51-99b0-108921470959 could not be found. 
{{(pid=61852) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 993.174182] env[61852]: DEBUG oslo_concurrency.lockutils [None req-22660ac3-d5c7-426f-bc52-0696c38a47bc tempest-ServersNegativeTestJSON-196249676 tempest-ServersNegativeTestJSON-196249676-project-member] Lock "8d8679db-eb9d-45c1-b053-70378f58e273" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.680s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.282391] env[61852]: DEBUG oslo_concurrency.lockutils [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.405909] env[61852]: INFO nova.compute.manager [-] [instance: df332116-2ae3-4e51-99b0-108921470959] Took 1.26 seconds to deallocate network for instance. [ 993.406400] env[61852]: DEBUG nova.compute.manager [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 993.424608] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.480034] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.780618] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ab0dc39-231f-4079-9e2c-e4c21bd87ad0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.789467] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63efd529-64bb-4589-9cc0-d1ebebe0f96c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.821885] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de990c09-f672-478b-89d2-8da6e205d834 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.829349] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea840e66-b8bc-4879-93c1-3a747e4d1c20 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.844236] env[61852]: DEBUG nova.compute.provider_tree [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 
tempest-ServerDiskConfigTestJSON-1665070456-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 993.914747] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe360d91-280c-4134-a132-f1dca21559bb tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.014448] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.350258] env[61852]: DEBUG nova.scheduler.client.report [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 994.857021] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.213s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.857021] env[61852]: DEBUG nova.compute.manager [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 994.858737] env[61852]: DEBUG oslo_concurrency.lockutils [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.576s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.860207] env[61852]: INFO nova.compute.claims [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 995.365528] env[61852]: DEBUG nova.compute.utils [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 995.372385] env[61852]: DEBUG nova.compute.manager [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 995.372385] env[61852]: DEBUG nova.network.neutron [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 995.413493] env[61852]: DEBUG nova.policy [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eeca45e07f5b41e38b9ab8ac31bad06c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14a017ea2b084ae0ad2994dda7809c7c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 995.852811] env[61852]: DEBUG nova.network.neutron [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Successfully created port: d573484a-4782-4aef-8fe9-088ba601cde5 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 995.871773] env[61852]: DEBUG nova.compute.manager [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 996.011466] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe065721-b695-43fb-a83e-4056af68f72c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.020604] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76e82e33-5317-4f65-815b-5a73feb45231 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.058973] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34569208-fd29-42dc-8cdf-06c781c70553 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.068680] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc5d1a1e-7fd0-4d1a-8cf3-58af4c49b9f0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.839317] env[61852]: DEBUG nova.compute.provider_tree [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 997.330053] env[61852]: DEBUG nova.compute.manager [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 997.343812] env[61852]: DEBUG nova.scheduler.client.report [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 997.360914] env[61852]: DEBUG nova.virt.hardware [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 997.361181] env[61852]: DEBUG nova.virt.hardware [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 997.361344] env[61852]: DEBUG nova.virt.hardware [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 997.361528] env[61852]: DEBUG nova.virt.hardware [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 997.361686] env[61852]: DEBUG nova.virt.hardware [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 997.361830] env[61852]: DEBUG nova.virt.hardware [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 997.363641] env[61852]: 
DEBUG nova.virt.hardware [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 997.363884] env[61852]: DEBUG nova.virt.hardware [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 997.364091] env[61852]: DEBUG nova.virt.hardware [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 997.364270] env[61852]: DEBUG nova.virt.hardware [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 997.364455] env[61852]: DEBUG nova.virt.hardware [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 997.366871] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d764ad-26f8-45b5-b12a-40374cab0f92 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.377326] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2b55fdf-35eb-4dbd-ac34-9ef9269025bc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.481713] env[61852]: DEBUG nova.compute.manager [req-2ce02b58-e892-4376-b7d6-888266290034 req-f4898f3a-bd40-42af-b7af-1e17b4afcb4a service nova] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Received event network-vif-plugged-d573484a-4782-4aef-8fe9-088ba601cde5 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 997.481943] env[61852]: DEBUG oslo_concurrency.lockutils [req-2ce02b58-e892-4376-b7d6-888266290034 req-f4898f3a-bd40-42af-b7af-1e17b4afcb4a service nova] Acquiring lock "76fa1b27-bd1f-4794-a56b-88373e79db9a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.482170] env[61852]: DEBUG oslo_concurrency.lockutils [req-2ce02b58-e892-4376-b7d6-888266290034 req-f4898f3a-bd40-42af-b7af-1e17b4afcb4a service nova] Lock "76fa1b27-bd1f-4794-a56b-88373e79db9a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 997.482343] env[61852]: DEBUG oslo_concurrency.lockutils 
[req-2ce02b58-e892-4376-b7d6-888266290034 req-f4898f3a-bd40-42af-b7af-1e17b4afcb4a service nova] Lock "76fa1b27-bd1f-4794-a56b-88373e79db9a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 997.482518] env[61852]: DEBUG nova.compute.manager [req-2ce02b58-e892-4376-b7d6-888266290034 req-f4898f3a-bd40-42af-b7af-1e17b4afcb4a service nova] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] No waiting events found dispatching network-vif-plugged-d573484a-4782-4aef-8fe9-088ba601cde5 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 997.482690] env[61852]: WARNING nova.compute.manager [req-2ce02b58-e892-4376-b7d6-888266290034 req-f4898f3a-bd40-42af-b7af-1e17b4afcb4a service nova] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Received unexpected event network-vif-plugged-d573484a-4782-4aef-8fe9-088ba601cde5 for instance with vm_state building and task_state spawning. [ 997.567366] env[61852]: DEBUG nova.network.neutron [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Successfully updated port: d573484a-4782-4aef-8fe9-088ba601cde5 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 997.706904] env[61852]: DEBUG oslo_concurrency.lockutils [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Acquiring lock "9db95089-9fd7-42e5-bbf3-64847642ade6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.707160] env[61852]: DEBUG oslo_concurrency.lockutils [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Lock "9db95089-9fd7-42e5-bbf3-64847642ade6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 997.854025] env[61852]: DEBUG oslo_concurrency.lockutils [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.994s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 997.854025] env[61852]: DEBUG nova.compute.manager [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 997.857553] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.433s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 997.857774] env[61852]: DEBUG nova.objects.instance [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lazy-loading 'resources' on Instance uuid d58958f2-7b6f-4480-9710-aa9e67ebd37c {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 998.069734] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "refresh_cache-76fa1b27-bd1f-4794-a56b-88373e79db9a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.069902] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired lock "refresh_cache-76fa1b27-bd1f-4794-a56b-88373e79db9a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.070060] env[61852]: DEBUG nova.network.neutron [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 998.213406] env[61852]: DEBUG nova.compute.manager [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 998.360562] env[61852]: DEBUG nova.compute.utils [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 998.365025] env[61852]: DEBUG nova.compute.manager [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Not allocating networking since 'none' was specified. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 998.469015] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-329a3d80-4d9b-44ad-b920-7941307fd36d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.477467] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab4499e3-c6a8-4954-9d51-a0a468ae0193 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.507139] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c35bac53-24ea-4ae4-bc52-33bee850990d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.514255] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a3b0590-2f5d-4878-b110-1448b0215ef2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.527039] env[61852]: DEBUG nova.compute.provider_tree [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 998.599654] env[61852]: DEBUG nova.network.neutron [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 998.722562] env[61852]: DEBUG nova.network.neutron [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Updating instance_info_cache with network_info: [{"id": "d573484a-4782-4aef-8fe9-088ba601cde5", "address": "fa:16:3e:03:70:89", "network": {"id": "37c975fc-d484-4e07-82b4-dc10db3dab61", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2132613748-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14a017ea2b084ae0ad2994dda7809c7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd573484a-47", "ovs_interfaceid": "d573484a-4782-4aef-8fe9-088ba601cde5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.735996] env[61852]: DEBUG oslo_concurrency.lockutils [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.866426] env[61852]: DEBUG nova.compute.manager [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 999.029449] env[61852]: DEBUG nova.scheduler.client.report [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 999.227113] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Releasing lock "refresh_cache-76fa1b27-bd1f-4794-a56b-88373e79db9a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 999.227439] env[61852]: DEBUG nova.compute.manager [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Instance network_info: |[{"id": "d573484a-4782-4aef-8fe9-088ba601cde5", "address": "fa:16:3e:03:70:89", "network": {"id": "37c975fc-d484-4e07-82b4-dc10db3dab61", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2132613748-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14a017ea2b084ae0ad2994dda7809c7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd573484a-47", "ovs_interfaceid": "d573484a-4782-4aef-8fe9-088ba601cde5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 999.227875] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:03:70:89', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51ae336c-12cf-406a-b1ca-54e9ce553b3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd573484a-4782-4aef-8fe9-088ba601cde5', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 999.235113] env[61852]: DEBUG oslo.service.loopingcall [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 
tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 999.235332] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 999.235560] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a4682b23-a426-476b-bb94-efe85a5ea73f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.254675] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 999.254675] env[61852]: value = "task-1293291" [ 999.254675] env[61852]: _type = "Task" [ 999.254675] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.261912] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293291, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.505692] env[61852]: DEBUG nova.compute.manager [req-3564396e-1d30-4068-a058-12df95a4d01a req-d1e151e1-1ed2-4f37-bf94-f32576b10bf2 service nova] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Received event network-changed-d573484a-4782-4aef-8fe9-088ba601cde5 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 999.505891] env[61852]: DEBUG nova.compute.manager [req-3564396e-1d30-4068-a058-12df95a4d01a req-d1e151e1-1ed2-4f37-bf94-f32576b10bf2 service nova] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Refreshing instance network info cache due to event network-changed-d573484a-4782-4aef-8fe9-088ba601cde5. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 999.506123] env[61852]: DEBUG oslo_concurrency.lockutils [req-3564396e-1d30-4068-a058-12df95a4d01a req-d1e151e1-1ed2-4f37-bf94-f32576b10bf2 service nova] Acquiring lock "refresh_cache-76fa1b27-bd1f-4794-a56b-88373e79db9a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 999.506275] env[61852]: DEBUG oslo_concurrency.lockutils [req-3564396e-1d30-4068-a058-12df95a4d01a req-d1e151e1-1ed2-4f37-bf94-f32576b10bf2 service nova] Acquired lock "refresh_cache-76fa1b27-bd1f-4794-a56b-88373e79db9a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.506437] env[61852]: DEBUG nova.network.neutron [req-3564396e-1d30-4068-a058-12df95a4d01a req-d1e151e1-1ed2-4f37-bf94-f32576b10bf2 service nova] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Refreshing network info cache for port d573484a-4782-4aef-8fe9-088ba601cde5 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 999.534380] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.676s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.536908] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.056s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.537749] env[61852]: INFO nova.compute.claims [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 999.553112] env[61852]: INFO nova.scheduler.client.report [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Deleted allocations for instance d58958f2-7b6f-4480-9710-aa9e67ebd37c [ 999.764427] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293291, 'name': CreateVM_Task} progress is 99%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.878089] env[61852]: DEBUG nova.compute.manager [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 999.901694] env[61852]: DEBUG nova.virt.hardware [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 999.901949] env[61852]: DEBUG nova.virt.hardware [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 999.902125] env[61852]: DEBUG nova.virt.hardware [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 999.902318] env[61852]: DEBUG nova.virt.hardware [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 999.902467] env[61852]: DEBUG nova.virt.hardware [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 999.902616] env[61852]: DEBUG nova.virt.hardware [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 999.902822] env[61852]: DEBUG nova.virt.hardware [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 999.902985] env[61852]: DEBUG nova.virt.hardware [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 999.903169] env[61852]: DEBUG nova.virt.hardware [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 
tempest-ServerShowV247Test-409016066-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 999.903334] env[61852]: DEBUG nova.virt.hardware [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 999.903507] env[61852]: DEBUG nova.virt.hardware [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 999.904365] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58571e40-ec15-48f4-9bac-4308ecc2c38c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.912202] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17e8f300-9612-4242-b43c-0163e42e7464 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.924662] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Instance VIF info [] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 999.929984] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Creating folder: Project (039daa6c3a0f4e78afe61e1e2ff837df). Parent ref: group-v277280. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 999.930252] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c8e9f7db-f02e-4b63-96f8-eec5ae8d27ec {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.939858] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Created folder: Project (039daa6c3a0f4e78afe61e1e2ff837df) in parent group-v277280. [ 999.940058] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Creating folder: Instances. Parent ref: group-v277408. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 999.940277] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f1b0fad0-a6c1-41b7-a063-5b45715998e7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.948210] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Created folder: Instances in parent group-v277408. 
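The topology walk just logged (Flavor/Image limits 0:0:0, preferred 0:0:0, maximum 65536:65536:65536, one possible topology for one vCPU) is Nova's nova.virt.hardware candidate enumeration at work: every sockets*cores*threads factorization of the vCPU count that fits the per-dimension maxima is a candidate, and the preference only reorders candidates afterwards. A minimal sketch of that enumeration, assuming illustrative names (VirtCPUTopology, possible_topologies) rather than Nova's real classes and signatures:

    # Hedged sketch of the enumeration behind the "Build topologies ... /
    # Got 1 possible topologies" lines above; simplified from Nova's logic.
    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, maximum):
        """Yield every (sockets, cores, threads) whose product equals vcpus."""
        for s in range(1, min(maximum.sockets, vcpus) + 1):
            for c in range(1, min(maximum.cores, vcpus) + 1):
                for t in range(1, min(maximum.threads, vcpus) + 1):
                    if s * c * t == vcpus:
                        yield VirtCPUTopology(s, c, t)

    # m1.nano has vcpus=1, so only 1:1:1 survives, matching the log:
    maximum = VirtCPUTopology(sockets=65536, cores=65536, threads=65536)
    print(list(possible_topologies(1, maximum)))  # one topology: 1 socket, 1 core, 1 thread

With limits of 0:0:0 meaning "no constraint", the sorted-desired step is a no-op here; it only matters when several factorizations of a larger vCPU count compete.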
[ 999.948430] env[61852]: DEBUG oslo.service.loopingcall [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 999.948613] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 999.948800] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6c54d876-927a-4843-89e6-f913bb22c755 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.963456] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 999.963456] env[61852]: value = "task-1293294" [ 999.963456] env[61852]: _type = "Task" [ 999.963456] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.973678] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293294, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.061398] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d6036f18-802f-4c68-8acb-aa3938cbd316 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "d58958f2-7b6f-4480-9710-aa9e67ebd37c" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 10.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.214630] env[61852]: DEBUG nova.network.neutron [req-3564396e-1d30-4068-a058-12df95a4d01a req-d1e151e1-1ed2-4f37-bf94-f32576b10bf2 service nova] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Updated VIF entry in instance network info cache for port d573484a-4782-4aef-8fe9-088ba601cde5. 
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1000.215113] env[61852]: DEBUG nova.network.neutron [req-3564396e-1d30-4068-a058-12df95a4d01a req-d1e151e1-1ed2-4f37-bf94-f32576b10bf2 service nova] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Updating instance_info_cache with network_info: [{"id": "d573484a-4782-4aef-8fe9-088ba601cde5", "address": "fa:16:3e:03:70:89", "network": {"id": "37c975fc-d484-4e07-82b4-dc10db3dab61", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2132613748-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14a017ea2b084ae0ad2994dda7809c7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd573484a-47", "ovs_interfaceid": "d573484a-4782-4aef-8fe9-088ba601cde5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.265578] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293291, 'name': CreateVM_Task} progress is 99%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.473496] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293294, 'name': CreateVM_Task, 'duration_secs': 0.243514} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.473496] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1000.473979] env[61852]: DEBUG oslo_concurrency.lockutils [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1000.474079] env[61852]: DEBUG oslo_concurrency.lockutils [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.474401] env[61852]: DEBUG oslo_concurrency.lockutils [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1000.474933] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8afb51ec-958a-4c44-b5a1-af085e53839d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.483643] env[61852]: DEBUG oslo_vmware.api [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for the task: (returnval){ [ 1000.483643] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5296adc1-56e8-8fa4-63b8-aa80f00b656d" [ 1000.483643] env[61852]: _type = "Task" [ 1000.483643] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.492318] env[61852]: DEBUG oslo_vmware.api [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5296adc1-56e8-8fa4-63b8-aa80f00b656d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.632911] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dddd5606-544d-476b-b5c0-c48c03232614 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.640660] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a516de7-39d7-4ac7-a4c1-35bcea8cddac {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.670100] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33d1d2f9-0af7-490f-9b29-105f0515f0ab {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.677178] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f2e20f-fc0b-4912-a0ae-bddf139f242b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.691642] env[61852]: DEBUG nova.compute.provider_tree [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1000.718018] env[61852]: DEBUG oslo_concurrency.lockutils [req-3564396e-1d30-4068-a058-12df95a4d01a req-d1e151e1-1ed2-4f37-bf94-f32576b10bf2 service nova] Releasing lock "refresh_cache-76fa1b27-bd1f-4794-a56b-88373e79db9a" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1000.766935] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293291, 'name': CreateVM_Task, 'duration_secs': 1.281838} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.767150] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1000.767853] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1000.984791] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "4623565b-cd36-498c-a0e9-c3b1c6ef479b" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.985157] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "4623565b-cd36-498c-a0e9-c3b1c6ef479b" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.985268] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "4623565b-cd36-498c-a0e9-c3b1c6ef479b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.985435] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "4623565b-cd36-498c-a0e9-c3b1c6ef479b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.985607] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "4623565b-cd36-498c-a0e9-c3b1c6ef479b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.990484] env[61852]: INFO nova.compute.manager [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Terminating instance [ 1000.992518] env[61852]: DEBUG nova.compute.manager [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 
4623565b-cd36-498c-a0e9-c3b1c6ef479b] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1000.992714] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1000.993457] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7820aa61-9a31-4a16-a8a2-41a498c948d5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.999349] env[61852]: DEBUG oslo_vmware.api [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5296adc1-56e8-8fa4-63b8-aa80f00b656d, 'name': SearchDatastore_Task, 'duration_secs': 0.016482} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.999923] env[61852]: DEBUG oslo_concurrency.lockutils [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.000159] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1001.000385] env[61852]: DEBUG oslo_concurrency.lockutils [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1001.000536] env[61852]: DEBUG oslo_concurrency.lockutils [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.000716] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1001.000984] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" 
{{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.001296] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1001.001764] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-32ad6910-ed1c-458f-82af-6abd99242423 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.005136] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f87f570-9adb-47d8-b7c0-ce70040c2ab6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.006519] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1001.007031] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-db30a2ac-065f-40bd-8d3c-dcdbb5e6d5c9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.011480] env[61852]: DEBUG oslo_vmware.api [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 1001.011480] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]523c0fe1-76f0-667d-a580-71202e66fa99" [ 1001.011480] env[61852]: _type = "Task" [ 1001.011480] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.015708] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1001.015883] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1001.016602] env[61852]: DEBUG oslo_vmware.api [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 1001.016602] env[61852]: value = "task-1293295" [ 1001.016602] env[61852]: _type = "Task" [ 1001.016602] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.017068] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48dee265-c906-420c-89cc-813df7918315 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.024547] env[61852]: DEBUG oslo_vmware.api [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]523c0fe1-76f0-667d-a580-71202e66fa99, 'name': SearchDatastore_Task, 'duration_secs': 0.007718} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.025118] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.025351] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1001.025557] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1001.027904] env[61852]: DEBUG oslo_vmware.api [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for the task: (returnval){ [ 1001.027904] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5268d1ba-80d5-2466-f9ab-3e5ce24bdab9" [ 1001.027904] env[61852]: _type = "Task" [ 1001.027904] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.030696] env[61852]: DEBUG oslo_vmware.api [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293295, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.037500] env[61852]: DEBUG oslo_vmware.api [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5268d1ba-80d5-2466-f9ab-3e5ce24bdab9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.194267] env[61852]: DEBUG nova.scheduler.client.report [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1001.528825] env[61852]: DEBUG oslo_vmware.api [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293295, 'name': PowerOffVM_Task, 'duration_secs': 0.184895} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.529122] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1001.529297] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1001.529546] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5de2e8c2-9751-4927-afb1-0b317126312a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.539392] env[61852]: DEBUG oslo_vmware.api [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5268d1ba-80d5-2466-f9ab-3e5ce24bdab9, 'name': SearchDatastore_Task, 'duration_secs': 0.00923} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.540088] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-544b8e2f-3f64-446e-a33d-49f8101c50b1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.544622] env[61852]: DEBUG oslo_vmware.api [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for the task: (returnval){ [ 1001.544622] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5227dd4d-a584-5663-b329-b473eb14d498" [ 1001.544622] env[61852]: _type = "Task" [ 1001.544622] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.551730] env[61852]: DEBUG oslo_vmware.api [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5227dd4d-a584-5663-b329-b473eb14d498, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.584141] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1001.584363] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1001.584548] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Deleting the datastore file [datastore1] 4623565b-cd36-498c-a0e9-c3b1c6ef479b {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1001.584821] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b239a46-8161-4da4-8173-033bc8ef5906 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.591079] env[61852]: DEBUG oslo_vmware.api [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for the task: (returnval){ [ 1001.591079] env[61852]: value = "task-1293297" [ 1001.591079] env[61852]: _type = "Task" [ 1001.591079] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.598756] env[61852]: DEBUG oslo_vmware.api [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293297, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.698990] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.163s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.699637] env[61852]: DEBUG nova.compute.manager [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1001.702411] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe360d91-280c-4134-a132-f1dca21559bb tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.788s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.702619] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe360d91-280c-4134-a132-f1dca21559bb tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.704674] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.690s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.706286] env[61852]: INFO nova.compute.claims [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1001.726186] env[61852]: INFO nova.scheduler.client.report [None req-fe360d91-280c-4134-a132-f1dca21559bb tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Deleted allocations for instance df332116-2ae3-4e51-99b0-108921470959 [ 1002.054693] env[61852]: DEBUG oslo_vmware.api [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5227dd4d-a584-5663-b329-b473eb14d498, 'name': SearchDatastore_Task, 'duration_secs': 0.008799} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.055068] env[61852]: DEBUG oslo_concurrency.lockutils [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1002.055272] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] 3ae6fdae-3246-4607-b15d-c320c4dc816b/3ae6fdae-3246-4607-b15d-c320c4dc816b.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1002.055550] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.055739] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1002.055947] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6993491d-9117-4942-9d77-994a0e3cbca8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.057814] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aab5b5aa-54dd-46a8-95bc-729ddec05a6f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.064576] env[61852]: DEBUG oslo_vmware.api [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for the task: (returnval){ [ 1002.064576] env[61852]: value = "task-1293298" [ 1002.064576] env[61852]: _type = "Task" [ 1002.064576] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.068120] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1002.068297] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1002.069232] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-493fc81a-2cc1-43e4-ba67-21d024107f2d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.073978] env[61852]: DEBUG oslo_vmware.api [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293298, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.076863] env[61852]: DEBUG oslo_vmware.api [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 1002.076863] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529112b0-f1b6-342b-9983-6ea7b0e99974" [ 1002.076863] env[61852]: _type = "Task" [ 1002.076863] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.083965] env[61852]: DEBUG oslo_vmware.api [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529112b0-f1b6-342b-9983-6ea7b0e99974, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.099702] env[61852]: DEBUG oslo_vmware.api [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Task: {'id': task-1293297, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.114396} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.099867] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1002.100066] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1002.100248] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1002.100419] env[61852]: INFO nova.compute.manager [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Took 1.11 seconds to destroy the instance on the hypervisor. 
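Every "Task: {...} progress is N%." record in this trace, including the PowerOffVM/UnregisterVM/DeleteDatastoreFile sequence that just destroyed instance 4623565b, comes from oslo.vmware's wait_for_task polling loop: the task's info is re-read on a fixed interval until it reaches success or error. A minimal stand-in, assuming a read_task_info() callable and a TaskFailed exception of our own rather than the real vSphere bindings:

    # Hedged sketch of the polling pattern behind the _poll_task lines.
    # read_task_info is an assumed callable returning a dict such as
    # {'id': 'task-1293297', 'state': 'running', 'progress': 42}; the real
    # oslo.vmware API polls a suds TaskInfo object via a looping call.
    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(read_task_info, interval=0.5):
        """Poll a vSphere-style task until it completes or errors."""
        while True:
            info = read_task_info()          # one property-collector round trip
            if info['state'] == 'success':
                return info                  # completed tasks carry 'duration_secs'
            if info['state'] == 'error':
                raise TaskFailed(info.get('error', 'unknown error'))
            # 'queued' / 'running': report progress and retry
            print("Task: {'id': %s} progress is %s%%."
                  % (info['id'], info.get('progress', 0)))
            time.sleep(interval)

The same loop explains the repeated "progress is 0%." / "progress is 99%." entries for the CreateVM and CopyVirtualDisk tasks elsewhere in this log: each entry is one poll iteration.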
[ 1002.100656] env[61852]: DEBUG oslo.service.loopingcall [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1002.100846] env[61852]: DEBUG nova.compute.manager [-] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1002.100942] env[61852]: DEBUG nova.network.neutron [-] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1002.210419] env[61852]: DEBUG nova.compute.utils [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1002.213949] env[61852]: DEBUG nova.compute.manager [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Not allocating networking since 'none' was specified. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1002.232953] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fe360d91-280c-4134-a132-f1dca21559bb tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "df332116-2ae3-4e51-99b0-108921470959" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 11.237s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.365597] env[61852]: DEBUG nova.compute.manager [req-bf152d20-cff7-4f91-80aa-e135bb595551 req-8eb08e5b-c2a4-4cff-85ff-b5a1d27e1537 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Received event network-vif-deleted-6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1002.365710] env[61852]: INFO nova.compute.manager [req-bf152d20-cff7-4f91-80aa-e135bb595551 req-8eb08e5b-c2a4-4cff-85ff-b5a1d27e1537 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Neutron deleted interface 6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883; detaching it from the instance and deleting it from the info cache [ 1002.365853] env[61852]: DEBUG nova.network.neutron [req-bf152d20-cff7-4f91-80aa-e135bb595551 req-8eb08e5b-c2a4-4cff-85ff-b5a1d27e1537 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.574288] env[61852]: DEBUG oslo_vmware.api [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293298, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.585969] env[61852]: DEBUG oslo_vmware.api [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]529112b0-f1b6-342b-9983-6ea7b0e99974, 'name': SearchDatastore_Task, 'duration_secs': 0.008556} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.586766] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d540e15-b6b3-4227-99ec-b89dfb1a0c6f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.591589] env[61852]: DEBUG oslo_vmware.api [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 1002.591589] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]520efd57-54b8-664a-18ad-204112fb2413" [ 1002.591589] env[61852]: _type = "Task" [ 1002.591589] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.599312] env[61852]: DEBUG oslo_vmware.api [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]520efd57-54b8-664a-18ad-204112fb2413, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.715190] env[61852]: DEBUG nova.compute.manager [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1002.799456] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70621681-7b6e-476d-b28e-ad6a504bac7f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.806975] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cab5228-05a0-4ba4-81ee-952b1df8e480 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.837972] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7e92673-6a50-467e-8a83-7f84e3d49598 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.840755] env[61852]: DEBUG nova.network.neutron [-] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.845171] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1904ffaa-0dee-4fa6-ad15-c4773f35fef2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.858877] env[61852]: DEBUG nova.compute.provider_tree [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1002.869175] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fa7670b9-46e6-484b-88c2-682d497f7e45 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.877642] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-157b6ffc-6c3e-4177-b613-680eda1cdc15 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.901313] env[61852]: DEBUG nova.compute.manager [req-bf152d20-cff7-4f91-80aa-e135bb595551 req-8eb08e5b-c2a4-4cff-85ff-b5a1d27e1537 service nova] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Detach interface failed, port_id=6ab757ae-eb63-4d4d-bdbf-3c3fdcdc4883, reason: Instance 4623565b-cd36-498c-a0e9-c3b1c6ef479b could not be found. {{(pid=61852) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1003.075700] env[61852]: DEBUG oslo_vmware.api [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293298, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.525931} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.076093] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] 3ae6fdae-3246-4607-b15d-c320c4dc816b/3ae6fdae-3246-4607-b15d-c320c4dc816b.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1003.076185] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1003.076378] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cbb51921-863e-4822-aa1e-ab7d93b8d2e0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.082091] env[61852]: DEBUG oslo_vmware.api [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for the task: (returnval){ [ 1003.082091] env[61852]: value = "task-1293299" [ 1003.082091] env[61852]: _type = "Task" [ 1003.082091] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.088801] env[61852]: DEBUG oslo_vmware.api [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293299, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.099271] env[61852]: DEBUG oslo_vmware.api [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]520efd57-54b8-664a-18ad-204112fb2413, 'name': SearchDatastore_Task, 'duration_secs': 0.034795} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.099500] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1003.099748] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] 76fa1b27-bd1f-4794-a56b-88373e79db9a/76fa1b27-bd1f-4794-a56b-88373e79db9a.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1003.100031] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2e03e2ae-4250-42cb-a10c-a0b639f6a38b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.105182] env[61852]: DEBUG oslo_vmware.api [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 1003.105182] env[61852]: value = "task-1293300" [ 1003.105182] env[61852]: _type = "Task" [ 1003.105182] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.112016] env[61852]: DEBUG oslo_vmware.api [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293300, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.342906] env[61852]: INFO nova.compute.manager [-] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Took 1.24 seconds to deallocate network for instance. [ 1003.361547] env[61852]: DEBUG nova.scheduler.client.report [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1003.590903] env[61852]: DEBUG oslo_vmware.api [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293299, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062104} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.591209] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1003.591956] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-371e28c6-a0ee-485a-a4f3-e4e5aebdca12 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.610573] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] 3ae6fdae-3246-4607-b15d-c320c4dc816b/3ae6fdae-3246-4607-b15d-c320c4dc816b.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1003.610911] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-edbb7908-dd13-4868-b393-6bedebf23fae {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.632786] env[61852]: DEBUG oslo_vmware.api [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293300, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.459249} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.633975] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] 76fa1b27-bd1f-4794-a56b-88373e79db9a/76fa1b27-bd1f-4794-a56b-88373e79db9a.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1003.634225] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1003.634531] env[61852]: DEBUG oslo_vmware.api [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for the task: (returnval){ [ 1003.634531] env[61852]: value = "task-1293301" [ 1003.634531] env[61852]: _type = "Task" [ 1003.634531] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.634767] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f51d4d8f-710e-42cd-b48b-ce4a62afafa1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.642857] env[61852]: DEBUG oslo_vmware.api [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 1003.642857] env[61852]: value = "task-1293302" [ 1003.642857] env[61852]: _type = "Task" [ 1003.642857] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.645335] env[61852]: DEBUG oslo_vmware.api [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293301, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.653014] env[61852]: DEBUG oslo_vmware.api [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293302, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.726760] env[61852]: DEBUG nova.compute.manager [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1003.752211] env[61852]: DEBUG nova.virt.hardware [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1003.752471] env[61852]: DEBUG nova.virt.hardware [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1003.752632] env[61852]: DEBUG nova.virt.hardware [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1003.752816] env[61852]: DEBUG nova.virt.hardware [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1003.752967] env[61852]: DEBUG nova.virt.hardware [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1003.753141] env[61852]: DEBUG nova.virt.hardware [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1003.753353] env[61852]: DEBUG nova.virt.hardware [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1003.753517] env[61852]: DEBUG nova.virt.hardware [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1003.753688] env[61852]: DEBUG nova.virt.hardware [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 
tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1003.753854] env[61852]: DEBUG nova.virt.hardware [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1003.754087] env[61852]: DEBUG nova.virt.hardware [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1003.754996] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5462956-61ce-4a63-9f8e-8fa3f85811ae {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.762813] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b5b3678-f82d-41cc-b26a-7220b127fd27 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.775792] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Instance VIF info [] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1003.781216] env[61852]: DEBUG oslo.service.loopingcall [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1003.781460] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1003.781667] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9f8521bb-14fd-4ae3-ab05-1f673d8290cd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.797198] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1003.797198] env[61852]: value = "task-1293303" [ 1003.797198] env[61852]: _type = "Task" [ 1003.797198] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.804255] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293303, 'name': CreateVM_Task} progress is 0%. 
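
The oslo.service record "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" appears to come from loopingcall.RetryDecorator, which re-invokes a function whenever it raises one of a listed set of exceptions. A hedged sketch of such a wrapper around CreateVM_Task; the retry count and the exception choice are assumptions:

    from oslo_service import loopingcall
    from oslo_vmware import exceptions as vexc

    # Retry VM creation a few times if vCenter reports the target as busy.
    @loopingcall.RetryDecorator(max_retry_count=3,
                                exceptions=(vexc.TaskInProgress,))
    def create_vm(session, folder_ref, config_spec, res_pool_ref):
        task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                                  config=config_spec, pool=res_pool_ref)
        return session.wait_for_task(task)
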
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.849826] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.866023] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.161s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.866525] env[61852]: DEBUG nova.compute.manager [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1003.869441] env[61852]: DEBUG oslo_concurrency.lockutils [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.134s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.870893] env[61852]: INFO nova.compute.claims [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1004.145966] env[61852]: DEBUG oslo_vmware.api [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293301, 'name': ReconfigVM_Task, 'duration_secs': 0.419555} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.149112] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Reconfigured VM instance instance-00000060 to attach disk [datastore2] 3ae6fdae-3246-4607-b15d-c320c4dc816b/3ae6fdae-3246-4607-b15d-c320c4dc816b.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1004.149701] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6aba9f57-342c-41e8-b683-b90efcaf9f79 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.156557] env[61852]: DEBUG oslo_vmware.api [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293302, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078818} completed successfully. 
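
The 'Lock "compute_resources"' records above are oslo.concurrency's named-lock bookkeeping around the resource tracker: every claim and usage update on a compute host serializes on the same name, which is why this request reports having waited 5.134s before its claim could start. A sketch of the pattern:

    from oslo_concurrency import lockutils

    COMPUTE_RESOURCE_SEMAPHORE = 'compute_resources'

    # Entering the decorated function logs 'acquired ... waited Ns'; leaving
    # it logs '"released" ... held Ns', matching the records above.
    @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
    def instance_claim(context, instance, nodename):
        ...  # placeholder: test and reserve CPU/RAM/disk for the instance
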
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.157773] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1004.158116] env[61852]: DEBUG oslo_vmware.api [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for the task: (returnval){ [ 1004.158116] env[61852]: value = "task-1293304" [ 1004.158116] env[61852]: _type = "Task" [ 1004.158116] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.158767] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e825c43-9671-47d9-a302-db003422d659 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.169712] env[61852]: DEBUG oslo_vmware.api [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293304, 'name': Rename_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.186366] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] 76fa1b27-bd1f-4794-a56b-88373e79db9a/76fa1b27-bd1f-4794-a56b-88373e79db9a.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1004.186632] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-904960aa-d029-4587-a77f-0b5a6c6e48ac {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.205538] env[61852]: DEBUG oslo_vmware.api [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 1004.205538] env[61852]: value = "task-1293305" [ 1004.205538] env[61852]: _type = "Task" [ 1004.205538] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.214867] env[61852]: DEBUG oslo_vmware.api [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293305, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.306834] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293303, 'name': CreateVM_Task, 'duration_secs': 0.28979} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.307064] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1004.307449] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1004.307613] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.307957] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1004.308221] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f48e176-3ac7-4c36-9e57-0aa1905a1d71 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.313097] env[61852]: DEBUG oslo_vmware.api [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for the task: (returnval){ [ 1004.313097] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52256309-0916-1feb-2cf4-17c6a7765777" [ 1004.313097] env[61852]: _type = "Task" [ 1004.313097] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.320187] env[61852]: DEBUG oslo_vmware.api [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52256309-0916-1feb-2cf4-17c6a7765777, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.377798] env[61852]: DEBUG nova.compute.utils [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1004.380296] env[61852]: DEBUG nova.compute.manager [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1004.380489] env[61852]: DEBUG nova.network.neutron [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1004.425597] env[61852]: DEBUG nova.policy [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd1349b8262e345068742af657fa8cbd0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4dbb543c66364861bf5f437c8c33a550', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 1004.657080] env[61852]: DEBUG nova.network.neutron [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Successfully created port: 155536cc-5884-4a09-8035-a2768a29dc81 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1004.672087] env[61852]: DEBUG oslo_vmware.api [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293304, 'name': Rename_Task, 'duration_secs': 0.141354} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.672087] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1004.672087] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d29419dd-700c-4ca9-ade7-57d6b668860c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.677460] env[61852]: DEBUG oslo_vmware.api [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for the task: (returnval){ [ 1004.677460] env[61852]: value = "task-1293306" [ 1004.677460] env[61852]: _type = "Task" [ 1004.677460] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.685086] env[61852]: DEBUG oslo_vmware.api [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293306, 'name': PowerOnVM_Task} progress is 0%. 
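
The "Policy check for network:attach_external_network failed" record is an oslo.policy authorization rejecting a caller that holds only the reader and member roles, after which the port is created as a normal internal port. A self-contained sketch of such a check; the rule strings registered here are assumptions, since Nova ships its own defaults:

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.ConfigOpts())
    enforcer.register_default(policy.RuleDefault(
        'network:attach_external_network', 'rule:context_is_admin'))
    enforcer.register_default(policy.RuleDefault(
        'context_is_admin', 'role:admin'))

    # Credentials trimmed from the log record above: non-admin, reader/member.
    creds = {'roles': ['reader', 'member'], 'is_admin': False,
             'project_id': '4dbb543c66364861bf5f437c8c33a550'}

    # Returns False for this caller, which Nova logs as the failure above.
    print(enforcer.enforce('network:attach_external_network', {}, creds))
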
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.714543] env[61852]: DEBUG oslo_vmware.api [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293305, 'name': ReconfigVM_Task, 'duration_secs': 0.274626} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.715030] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Reconfigured VM instance instance-0000005f to attach disk [datastore2] 76fa1b27-bd1f-4794-a56b-88373e79db9a/76fa1b27-bd1f-4794-a56b-88373e79db9a.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1004.715710] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dbf2d8c9-c6d1-4871-b75e-55171db4f2f0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.721467] env[61852]: DEBUG oslo_vmware.api [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 1004.721467] env[61852]: value = "task-1293307" [ 1004.721467] env[61852]: _type = "Task" [ 1004.721467] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.731018] env[61852]: DEBUG oslo_vmware.api [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293307, 'name': Rename_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.824681] env[61852]: DEBUG oslo_vmware.api [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52256309-0916-1feb-2cf4-17c6a7765777, 'name': SearchDatastore_Task, 'duration_secs': 0.009155} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.825098] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1004.825407] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1004.825775] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1004.825928] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.826183] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1004.826500] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-db095c43-82b4-4ae5-9673-a05dc4ed3240 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.835382] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1004.835659] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Folder [datastore1] devstack-image-cache_base created. 
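
The "Creating directory with path [datastore1] devstack-image-cache_base" records map to FileManager.MakeDirectory, a plain SOAP method rather than a *_Task, so there is no task to poll afterwards. A sketch under the same assumptions as the earlier ones (dc_ref is an assumed datacenter moref):

    vim = session.vim
    session.invoke_api(
        vim, 'MakeDirectory', vim.service_content.fileManager,
        name='[datastore1] devstack-image-cache_base',
        datacenter=dc_ref,
        createParentDirectories=True)   # also create missing parent folders
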
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1004.836489] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0799cf00-845e-4d09-8b42-37ef0316a411 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.842985] env[61852]: DEBUG oslo_vmware.api [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for the task: (returnval){ [ 1004.842985] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5200d330-5463-39e1-965c-10d5d62cd69f" [ 1004.842985] env[61852]: _type = "Task" [ 1004.842985] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.851700] env[61852]: DEBUG oslo_vmware.api [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5200d330-5463-39e1-965c-10d5d62cd69f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.880596] env[61852]: DEBUG nova.compute.manager [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1004.975615] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c5e141c-eb37-40fc-b376-12fa0f05b381 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.983010] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3009092f-7396-48b7-a714-96892555aa4a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.012710] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-004f874b-b3c1-4b8e-a3ba-aa556f6ec222 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.019596] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b80293b-8c35-44fc-85ae-81583a098f0d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.032261] env[61852]: DEBUG nova.compute.provider_tree [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1005.187754] env[61852]: DEBUG oslo_vmware.api [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293306, 'name': PowerOnVM_Task, 'duration_secs': 0.438065} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.188052] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1005.188052] env[61852]: INFO nova.compute.manager [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Took 5.31 seconds to spawn the instance on the hypervisor. [ 1005.189173] env[61852]: DEBUG nova.compute.manager [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1005.189173] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cafbfd77-a372-42e4-a436-9d43ff682494 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.230451] env[61852]: DEBUG oslo_vmware.api [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293307, 'name': Rename_Task, 'duration_secs': 0.16435} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.231406] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1005.231665] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e6c21f1d-62c6-42e2-b6f7-0c46281e6411 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.238033] env[61852]: DEBUG oslo_vmware.api [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 1005.238033] env[61852]: value = "task-1293308" [ 1005.238033] env[61852]: _type = "Task" [ 1005.238033] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.245783] env[61852]: DEBUG oslo_vmware.api [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293308, 'name': PowerOnVM_Task} progress is 0%. 
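
Once PowerOnVM_Task completes, the "Checking state" record reads the VM's power state back through the property collector (the RetrievePropertiesEx call that follows it). A sketch of both steps, with vm_ref standing in for the instance's managed-object reference:

    from oslo_vmware import vim_util

    # Power on and block until the task finishes (the PowerOnVM_Task records).
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)

    # Read runtime.powerState back, e.g. 'poweredOn'.
    state = session.invoke_api(vim_util, 'get_object_property',
                               session.vim, vm_ref, 'runtime.powerState')
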
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.353026] env[61852]: DEBUG oslo_vmware.api [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5200d330-5463-39e1-965c-10d5d62cd69f, 'name': SearchDatastore_Task, 'duration_secs': 0.028236} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.353793] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc9a21fd-bef7-4e98-8656-5dbab38db2b5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.358625] env[61852]: DEBUG oslo_vmware.api [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for the task: (returnval){ [ 1005.358625] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52181774-2c13-8e94-162d-0a10c11c003b" [ 1005.358625] env[61852]: _type = "Task" [ 1005.358625] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.365711] env[61852]: DEBUG oslo_vmware.api [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52181774-2c13-8e94-162d-0a10c11c003b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.535515] env[61852]: DEBUG nova.scheduler.client.report [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1005.704344] env[61852]: INFO nova.compute.manager [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Took 12.44 seconds to build instance. [ 1005.747802] env[61852]: DEBUG oslo_vmware.api [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293308, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.868611] env[61852]: DEBUG oslo_vmware.api [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52181774-2c13-8e94-162d-0a10c11c003b, 'name': SearchDatastore_Task, 'duration_secs': 0.009078} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.868883] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1005.869153] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 33667154-991d-4a32-8f16-f292a4725e3e/33667154-991d-4a32-8f16-f292a4725e3e.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1005.869417] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1412e390-d07a-4b2a-8b34-94e1919d832a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.875789] env[61852]: DEBUG oslo_vmware.api [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for the task: (returnval){ [ 1005.875789] env[61852]: value = "task-1293309" [ 1005.875789] env[61852]: _type = "Task" [ 1005.875789] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.882872] env[61852]: DEBUG oslo_vmware.api [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293309, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.891977] env[61852]: DEBUG nova.compute.manager [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1005.916173] env[61852]: DEBUG nova.virt.hardware [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1005.916435] env[61852]: DEBUG nova.virt.hardware [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1005.916598] env[61852]: DEBUG nova.virt.hardware [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1005.916814] env[61852]: DEBUG nova.virt.hardware [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1005.916983] env[61852]: DEBUG nova.virt.hardware [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1005.917156] env[61852]: DEBUG nova.virt.hardware [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1005.917375] env[61852]: DEBUG nova.virt.hardware [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1005.917543] env[61852]: DEBUG nova.virt.hardware [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1005.917716] env[61852]: DEBUG nova.virt.hardware [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 
tempest-ServersTestJSON-848945246-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1005.917881] env[61852]: DEBUG nova.virt.hardware [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1005.918067] env[61852]: DEBUG nova.virt.hardware [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1005.918920] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ffa8e3d-433f-4254-8629-658dda36be9c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.926425] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a42754a-ebf5-4296-9a8b-c2674ff14773 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.040245] env[61852]: DEBUG oslo_concurrency.lockutils [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.171s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.040806] env[61852]: DEBUG nova.compute.manager [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1006.043445] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.194s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.043675] env[61852]: DEBUG nova.objects.instance [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lazy-loading 'resources' on Instance uuid 4623565b-cd36-498c-a0e9-c3b1c6ef479b {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1006.092736] env[61852]: DEBUG nova.compute.manager [req-51e9934b-2461-4676-8733-1a5769ff77b0 req-9582dd8b-7ca9-46a8-b202-034f60accd7b service nova] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Received event network-vif-plugged-155536cc-5884-4a09-8035-a2768a29dc81 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1006.093007] env[61852]: DEBUG oslo_concurrency.lockutils [req-51e9934b-2461-4676-8733-1a5769ff77b0 req-9582dd8b-7ca9-46a8-b202-034f60accd7b service nova] Acquiring lock "8d6dc967-ebe5-4573-b41a-5793f96b7eec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.093278] env[61852]: DEBUG oslo_concurrency.lockutils [req-51e9934b-2461-4676-8733-1a5769ff77b0 req-9582dd8b-7ca9-46a8-b202-034f60accd7b service nova] Lock "8d6dc967-ebe5-4573-b41a-5793f96b7eec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.093436] env[61852]: DEBUG oslo_concurrency.lockutils [req-51e9934b-2461-4676-8733-1a5769ff77b0 req-9582dd8b-7ca9-46a8-b202-034f60accd7b service nova] Lock "8d6dc967-ebe5-4573-b41a-5793f96b7eec-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.093670] env[61852]: DEBUG nova.compute.manager [req-51e9934b-2461-4676-8733-1a5769ff77b0 req-9582dd8b-7ca9-46a8-b202-034f60accd7b service nova] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] No waiting events found dispatching network-vif-plugged-155536cc-5884-4a09-8035-a2768a29dc81 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1006.093781] env[61852]: WARNING nova.compute.manager [req-51e9934b-2461-4676-8733-1a5769ff77b0 req-9582dd8b-7ca9-46a8-b202-034f60accd7b service nova] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Received unexpected event network-vif-plugged-155536cc-5884-4a09-8035-a2768a29dc81 for instance with vm_state building and task_state spawning. 
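
The network-vif-plugged records above show the compute manager's external-event mailbox: a spawning thread registers a waiter keyed by instance and event name, and the API call triggered by Neutron pops and signals it. Here nothing was registered yet, hence "No waiting events found" and the unexpected-event WARNING, which is harmless while the instance is still building. A toy sketch of that mailbox, using threading in place of Nova's eventlet primitives:

    import threading

    class InstanceEvents(object):
        """Toy model of the waiter registry behind the records above."""

        def __init__(self):
            self._events = {}              # {instance_uuid: {event_name: Event}}
            self._lock = threading.Lock()  # the "<uuid>-events" lock in the log

        def prepare(self, uuid, name):
            # Registered before the port update; the spawning thread blocks
            # on the returned event with ev.wait(timeout).
            with self._lock:
                ev = threading.Event()
                self._events.setdefault(uuid, {})[name] = ev
            return ev

        def pop(self, uuid, name):
            # Called when Neutron reports network-vif-plugged.
            with self._lock:
                ev = self._events.get(uuid, {}).pop(name, None)
            if ev is None:
                return False   # -> "No waiting events found" + WARNING
            ev.set()           # wakes the waiting spawn
            return True
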
[ 1006.198272] env[61852]: DEBUG nova.network.neutron [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Successfully updated port: 155536cc-5884-4a09-8035-a2768a29dc81 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1006.206802] env[61852]: DEBUG oslo_concurrency.lockutils [None req-620a61f3-3354-424e-9f3e-d9f043dbce37 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Lock "3ae6fdae-3246-4607-b15d-c320c4dc816b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.950s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.250818] env[61852]: DEBUG oslo_vmware.api [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293308, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.387892] env[61852]: DEBUG oslo_vmware.api [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293309, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.546694] env[61852]: DEBUG nova.compute.utils [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1006.551032] env[61852]: DEBUG nova.compute.manager [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1006.551191] env[61852]: DEBUG nova.network.neutron [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1006.591933] env[61852]: DEBUG nova.policy [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '67db4d6bda9f42eea53940c5dffe80c8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68498fcf21ac47abab34e5809137c1aa', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 1006.634612] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-653247eb-509e-439d-a3f3-f0f897fa471c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.642999] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4475e4ef-2b9e-4a59-afa3-f169412465fd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.672292] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-939698b8-7adb-4609-bc7d-dd55962296e2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.679022] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f929d0b-27ee-493c-867e-a70256950d12 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.691832] env[61852]: DEBUG nova.compute.provider_tree [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1006.701233] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "refresh_cache-8d6dc967-ebe5-4573-b41a-5793f96b7eec" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1006.701482] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquired lock "refresh_cache-8d6dc967-ebe5-4573-b41a-5793f96b7eec" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.701541] env[61852]: DEBUG nova.network.neutron [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 
tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1006.747623] env[61852]: DEBUG oslo_vmware.api [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293308, 'name': PowerOnVM_Task, 'duration_secs': 1.121751} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.747906] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1006.748127] env[61852]: INFO nova.compute.manager [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Took 9.42 seconds to spawn the instance on the hypervisor. [ 1006.748314] env[61852]: DEBUG nova.compute.manager [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1006.749053] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-652aee0d-d4a9-4145-8d08-a4d92d76c8a4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.827321] env[61852]: DEBUG nova.network.neutron [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Successfully created port: 0846a1e1-631a-4825-96c0-abda4b16c822 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1006.886552] env[61852]: DEBUG oslo_vmware.api [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293309, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.725058} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.886878] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 33667154-991d-4a32-8f16-f292a4725e3e/33667154-991d-4a32-8f16-f292a4725e3e.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1006.887114] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1006.887363] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-58c17895-4fd4-4caf-86e6-27a370ffbe2a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.894071] env[61852]: DEBUG oslo_vmware.api [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for the task: (returnval){ [ 1006.894071] env[61852]: value = "task-1293310" [ 1006.894071] env[61852]: _type = "Task" [ 1006.894071] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.902013] env[61852]: DEBUG oslo_vmware.api [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293310, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.052397] env[61852]: DEBUG nova.compute.manager [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1007.195145] env[61852]: DEBUG nova.scheduler.client.report [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1007.231807] env[61852]: DEBUG nova.network.neutron [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1007.263499] env[61852]: INFO nova.compute.manager [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Took 14.77 seconds to build instance. [ 1007.358723] env[61852]: DEBUG nova.network.neutron [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Updating instance_info_cache with network_info: [{"id": "155536cc-5884-4a09-8035-a2768a29dc81", "address": "fa:16:3e:e4:5f:61", "network": {"id": "66e1ee36-559a-4219-ab11-b6c5d9aeb20e", "bridge": "br-int", "label": "tempest-ServersTestJSON-206536995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4dbb543c66364861bf5f437c8c33a550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2c424c9-6446-4b2a-af8c-4d9c29117c39", "external-id": "nsx-vlan-transportzone-437", "segmentation_id": 437, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap155536cc-58", "ovs_interfaceid": "155536cc-5884-4a09-8035-a2768a29dc81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.403893] env[61852]: DEBUG oslo_vmware.api [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293310, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.702470] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.658s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.724336] env[61852]: INFO nova.scheduler.client.report [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Deleted allocations for instance 4623565b-cd36-498c-a0e9-c3b1c6ef479b [ 1007.765934] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5c153238-e85d-4d84-ba11-cc6e77b53624 tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "76fa1b27-bd1f-4794-a56b-88373e79db9a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.280s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.861190] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Releasing lock "refresh_cache-8d6dc967-ebe5-4573-b41a-5793f96b7eec" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1007.861529] env[61852]: DEBUG nova.compute.manager [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Instance network_info: |[{"id": "155536cc-5884-4a09-8035-a2768a29dc81", "address": "fa:16:3e:e4:5f:61", "network": {"id": "66e1ee36-559a-4219-ab11-b6c5d9aeb20e", "bridge": "br-int", "label": "tempest-ServersTestJSON-206536995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4dbb543c66364861bf5f437c8c33a550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2c424c9-6446-4b2a-af8c-4d9c29117c39", "external-id": "nsx-vlan-transportzone-437", "segmentation_id": 437, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap155536cc-58", "ovs_interfaceid": "155536cc-5884-4a09-8035-a2768a29dc81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1007.861983] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:5f:61', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'f2c424c9-6446-4b2a-af8c-4d9c29117c39', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '155536cc-5884-4a09-8035-a2768a29dc81', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1007.869595] env[61852]: DEBUG oslo.service.loopingcall [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1007.869871] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1007.870085] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-018478d3-1d2d-4e73-bcaf-6ab56bb3c3f9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.890667] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1007.890667] env[61852]: value = "task-1293311" [ 1007.890667] env[61852]: _type = "Task" [ 1007.890667] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.901122] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293311, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.905814] env[61852]: DEBUG oslo_vmware.api [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293310, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.065109] env[61852]: DEBUG nova.compute.manager [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1008.089295] env[61852]: DEBUG nova.virt.hardware [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1008.089551] env[61852]: DEBUG nova.virt.hardware [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1008.089715] env[61852]: DEBUG nova.virt.hardware [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1008.089900] env[61852]: DEBUG nova.virt.hardware [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1008.090064] env[61852]: DEBUG nova.virt.hardware [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1008.090224] env[61852]: DEBUG nova.virt.hardware [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1008.090433] env[61852]: DEBUG nova.virt.hardware [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1008.090597] env[61852]: DEBUG nova.virt.hardware [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1008.090765] env[61852]: DEBUG nova.virt.hardware [None 
req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1008.090928] env[61852]: DEBUG nova.virt.hardware [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1008.091908] env[61852]: DEBUG nova.virt.hardware [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1008.092039] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dab7b502-7273-40cb-a1f9-d75d9b7f9376 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.100454] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb94cf8-7d96-4c1e-887b-4ab45757ecff {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.123922] env[61852]: DEBUG nova.compute.manager [req-2c3e344b-d87b-427a-9f2e-daa622e6fcd3 req-4b08a6fd-2912-4be9-a370-492b4f7a0bc1 service nova] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Received event network-changed-155536cc-5884-4a09-8035-a2768a29dc81 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1008.124067] env[61852]: DEBUG nova.compute.manager [req-2c3e344b-d87b-427a-9f2e-daa622e6fcd3 req-4b08a6fd-2912-4be9-a370-492b4f7a0bc1 service nova] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Refreshing instance network info cache due to event network-changed-155536cc-5884-4a09-8035-a2768a29dc81. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1008.124293] env[61852]: DEBUG oslo_concurrency.lockutils [req-2c3e344b-d87b-427a-9f2e-daa622e6fcd3 req-4b08a6fd-2912-4be9-a370-492b4f7a0bc1 service nova] Acquiring lock "refresh_cache-8d6dc967-ebe5-4573-b41a-5793f96b7eec" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1008.124440] env[61852]: DEBUG oslo_concurrency.lockutils [req-2c3e344b-d87b-427a-9f2e-daa622e6fcd3 req-4b08a6fd-2912-4be9-a370-492b4f7a0bc1 service nova] Acquired lock "refresh_cache-8d6dc967-ebe5-4573-b41a-5793f96b7eec" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.124713] env[61852]: DEBUG nova.network.neutron [req-2c3e344b-d87b-427a-9f2e-daa622e6fcd3 req-4b08a6fd-2912-4be9-a370-492b4f7a0bc1 service nova] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Refreshing network info cache for port 155536cc-5884-4a09-8035-a2768a29dc81 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1008.231832] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ca513924-f110-40d6-a111-ce711dec3b58 tempest-AttachInterfacesTestJSON-673373864 tempest-AttachInterfacesTestJSON-673373864-project-member] Lock "4623565b-cd36-498c-a0e9-c3b1c6ef479b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.247s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.402111] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293311, 'name': CreateVM_Task, 'duration_secs': 0.346839} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.402663] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1008.403400] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1008.403574] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.405168] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1008.406838] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53a41846-57c8-47cb-8ce4-67854cdf02f0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.408483] env[61852]: DEBUG oslo_vmware.api [None 
req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293310, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.411391] env[61852]: DEBUG oslo_vmware.api [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 1008.411391] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5207ecaa-89bc-0b72-9153-f4a93b638b73" [ 1008.411391] env[61852]: _type = "Task" [ 1008.411391] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.419410] env[61852]: DEBUG oslo_vmware.api [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5207ecaa-89bc-0b72-9153-f4a93b638b73, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.458925] env[61852]: DEBUG nova.network.neutron [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Successfully updated port: 0846a1e1-631a-4825-96c0-abda4b16c822 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1008.689310] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "76fa1b27-bd1f-4794-a56b-88373e79db9a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.689602] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "76fa1b27-bd1f-4794-a56b-88373e79db9a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.689818] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "76fa1b27-bd1f-4794-a56b-88373e79db9a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.690014] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "76fa1b27-bd1f-4794-a56b-88373e79db9a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.690190] env[61852]: DEBUG oslo_concurrency.lockutils 
[None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "76fa1b27-bd1f-4794-a56b-88373e79db9a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.692719] env[61852]: INFO nova.compute.manager [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Terminating instance [ 1008.695969] env[61852]: DEBUG nova.compute.manager [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1008.695969] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1008.695969] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbcc3388-e1fc-4998-b76c-c1756583b7ad {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.703368] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1008.703629] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f9eb0024-2b4b-4a07-aa23-8fd6d264e0ad {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.710548] env[61852]: DEBUG oslo_vmware.api [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 1008.710548] env[61852]: value = "task-1293312" [ 1008.710548] env[61852]: _type = "Task" [ 1008.710548] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.718444] env[61852]: DEBUG oslo_vmware.api [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293312, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.905603] env[61852]: DEBUG oslo_vmware.api [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293310, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.908887] env[61852]: DEBUG nova.network.neutron [req-2c3e344b-d87b-427a-9f2e-daa622e6fcd3 req-4b08a6fd-2912-4be9-a370-492b4f7a0bc1 service nova] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Updated VIF entry in instance network info cache for port 155536cc-5884-4a09-8035-a2768a29dc81. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1008.909573] env[61852]: DEBUG nova.network.neutron [req-2c3e344b-d87b-427a-9f2e-daa622e6fcd3 req-4b08a6fd-2912-4be9-a370-492b4f7a0bc1 service nova] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Updating instance_info_cache with network_info: [{"id": "155536cc-5884-4a09-8035-a2768a29dc81", "address": "fa:16:3e:e4:5f:61", "network": {"id": "66e1ee36-559a-4219-ab11-b6c5d9aeb20e", "bridge": "br-int", "label": "tempest-ServersTestJSON-206536995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4dbb543c66364861bf5f437c8c33a550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2c424c9-6446-4b2a-af8c-4d9c29117c39", "external-id": "nsx-vlan-transportzone-437", "segmentation_id": 437, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap155536cc-58", "ovs_interfaceid": "155536cc-5884-4a09-8035-a2768a29dc81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.921020] env[61852]: DEBUG oslo_vmware.api [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5207ecaa-89bc-0b72-9153-f4a93b638b73, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.961590] env[61852]: DEBUG oslo_concurrency.lockutils [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Acquiring lock "refresh_cache-9db95089-9fd7-42e5-bbf3-64847642ade6" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1008.961875] env[61852]: DEBUG oslo_concurrency.lockutils [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Acquired lock "refresh_cache-9db95089-9fd7-42e5-bbf3-64847642ade6" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.962170] env[61852]: DEBUG nova.network.neutron [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1009.221603] env[61852]: DEBUG oslo_vmware.api [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293312, 'name': PowerOffVM_Task, 'duration_secs': 0.302539} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.221972] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1009.222171] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1009.222442] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3c362b36-504d-4e19-8eca-0473477911c5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.309773] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1009.310070] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Deleting contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1009.310337] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 
tempest-ServerDiskConfigTestJSON-1665070456-project-member] Deleting the datastore file [datastore2] 76fa1b27-bd1f-4794-a56b-88373e79db9a {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1009.310681] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6cb36692-5bbc-4cbf-89da-38c37e6f2194 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.317488] env[61852]: DEBUG oslo_vmware.api [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for the task: (returnval){ [ 1009.317488] env[61852]: value = "task-1293314" [ 1009.317488] env[61852]: _type = "Task" [ 1009.317488] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.325825] env[61852]: DEBUG oslo_vmware.api [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293314, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.406549] env[61852]: DEBUG oslo_vmware.api [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293310, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.412265] env[61852]: DEBUG oslo_concurrency.lockutils [req-2c3e344b-d87b-427a-9f2e-daa622e6fcd3 req-4b08a6fd-2912-4be9-a370-492b4f7a0bc1 service nova] Releasing lock "refresh_cache-8d6dc967-ebe5-4573-b41a-5793f96b7eec" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1009.422845] env[61852]: DEBUG oslo_vmware.api [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5207ecaa-89bc-0b72-9153-f4a93b638b73, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.499918] env[61852]: DEBUG nova.network.neutron [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1009.645286] env[61852]: DEBUG nova.network.neutron [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Updating instance_info_cache with network_info: [{"id": "0846a1e1-631a-4825-96c0-abda4b16c822", "address": "fa:16:3e:70:b5:11", "network": {"id": "6f31e9d1-aa8e-45d5-bc78-6ee8cc9f0b76", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-751119227-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68498fcf21ac47abab34e5809137c1aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "97113f46-d648-4613-b233-069acba18198", "external-id": "nsx-vlan-transportzone-480", "segmentation_id": 480, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0846a1e1-63", "ovs_interfaceid": "0846a1e1-631a-4825-96c0-abda4b16c822", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.829887] env[61852]: DEBUG oslo_vmware.api [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Task: {'id': task-1293314, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.196062} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.830218] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1009.830439] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Deleted contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1009.830659] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1009.830854] env[61852]: INFO nova.compute.manager [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Took 1.14 seconds to destroy the instance on the hypervisor. 
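Annotation: the paired lock lines in this trace are emitted by oslo.concurrency itself, from the call sites shown in the {{...}} suffixes — the "acquired ... waited N.NNNs" / '"released" ... held N.NNNs' pairs come from the synchronized decorator's "inner" wrapper (lockutils.py:402/407/421), while the refresh_cache-* lines use the plain lock context manager (lockutils.py:310/313/331). The caller only wraps its critical section. A minimal sketch of both patterns, assuming oslo.concurrency is installed; the function bodies are illustrative placeholders, not Nova's actual teardown or cache-refresh code:

from oslo_concurrency import lockutils

# Decorator form: the wrapper ("inner" in the {{...}} suffixes above) logs
# "acquired ... waited" on entry and '"released" ... held' on exit, timing
# how long the caller waited for the lock and how long it held it.
@lockutils.synchronized('76fa1b27-bd1f-4794-a56b-88373e79db9a')
def do_terminate_instance():
    ...  # teardown runs with the per-instance lock held

# Context-manager form, as used for the refresh_cache-* locks above:
# entering logs "Acquiring lock"/"Acquired lock", leaving logs "Releasing".
with lockutils.lock('refresh_cache-9db95089-9fd7-42e5-bbf3-64847642ade6'):
    ...  # rebuild the instance's network info cache under the lock

The "held 0.000s" events-lock lines above show the same decorator pattern at sub-millisecond granularity: the lock name encodes the resource (here, "<uuid>-events"), so unrelated instances never contend.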
[ 1009.831151] env[61852]: DEBUG oslo.service.loopingcall [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1009.831399] env[61852]: DEBUG nova.compute.manager [-] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1009.831519] env[61852]: DEBUG nova.network.neutron [-] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1009.907737] env[61852]: DEBUG oslo_vmware.api [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293310, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.921929] env[61852]: DEBUG oslo_vmware.api [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5207ecaa-89bc-0b72-9153-f4a93b638b73, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.148193] env[61852]: DEBUG oslo_concurrency.lockutils [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Releasing lock "refresh_cache-9db95089-9fd7-42e5-bbf3-64847642ade6" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1010.148554] env[61852]: DEBUG nova.compute.manager [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Instance network_info: |[{"id": "0846a1e1-631a-4825-96c0-abda4b16c822", "address": "fa:16:3e:70:b5:11", "network": {"id": "6f31e9d1-aa8e-45d5-bc78-6ee8cc9f0b76", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-751119227-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68498fcf21ac47abab34e5809137c1aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "97113f46-d648-4613-b233-069acba18198", "external-id": "nsx-vlan-transportzone-480", "segmentation_id": 480, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0846a1e1-63", "ovs_interfaceid": "0846a1e1-631a-4825-96c0-abda4b16c822", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 1010.148997] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:70:b5:11', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '97113f46-d648-4613-b233-069acba18198', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0846a1e1-631a-4825-96c0-abda4b16c822', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1010.157587] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Creating folder: Project (68498fcf21ac47abab34e5809137c1aa). Parent ref: group-v277280. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1010.157888] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-19813628-4095-4a75-afff-a0dee9150bdc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.164337] env[61852]: DEBUG nova.compute.manager [req-b399d61e-43e8-48cd-86c6-150a060661dc req-7542c5c1-8b8c-467f-8d32-fd12cf271cb0 service nova] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Received event network-vif-plugged-0846a1e1-631a-4825-96c0-abda4b16c822 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1010.164578] env[61852]: DEBUG oslo_concurrency.lockutils [req-b399d61e-43e8-48cd-86c6-150a060661dc req-7542c5c1-8b8c-467f-8d32-fd12cf271cb0 service nova] Acquiring lock "9db95089-9fd7-42e5-bbf3-64847642ade6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.164868] env[61852]: DEBUG oslo_concurrency.lockutils [req-b399d61e-43e8-48cd-86c6-150a060661dc req-7542c5c1-8b8c-467f-8d32-fd12cf271cb0 service nova] Lock "9db95089-9fd7-42e5-bbf3-64847642ade6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.165071] env[61852]: DEBUG oslo_concurrency.lockutils [req-b399d61e-43e8-48cd-86c6-150a060661dc req-7542c5c1-8b8c-467f-8d32-fd12cf271cb0 service nova] Lock "9db95089-9fd7-42e5-bbf3-64847642ade6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.165249] env[61852]: DEBUG nova.compute.manager [req-b399d61e-43e8-48cd-86c6-150a060661dc req-7542c5c1-8b8c-467f-8d32-fd12cf271cb0 service nova] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] No waiting events found dispatching network-vif-plugged-0846a1e1-631a-4825-96c0-abda4b16c822 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1010.165415] env[61852]: WARNING nova.compute.manager [req-b399d61e-43e8-48cd-86c6-150a060661dc req-7542c5c1-8b8c-467f-8d32-fd12cf271cb0 service nova] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Received unexpected event 
network-vif-plugged-0846a1e1-631a-4825-96c0-abda4b16c822 for instance with vm_state building and task_state spawning. [ 1010.165581] env[61852]: DEBUG nova.compute.manager [req-b399d61e-43e8-48cd-86c6-150a060661dc req-7542c5c1-8b8c-467f-8d32-fd12cf271cb0 service nova] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Received event network-changed-0846a1e1-631a-4825-96c0-abda4b16c822 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1010.165738] env[61852]: DEBUG nova.compute.manager [req-b399d61e-43e8-48cd-86c6-150a060661dc req-7542c5c1-8b8c-467f-8d32-fd12cf271cb0 service nova] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Refreshing instance network info cache due to event network-changed-0846a1e1-631a-4825-96c0-abda4b16c822. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1010.165944] env[61852]: DEBUG oslo_concurrency.lockutils [req-b399d61e-43e8-48cd-86c6-150a060661dc req-7542c5c1-8b8c-467f-8d32-fd12cf271cb0 service nova] Acquiring lock "refresh_cache-9db95089-9fd7-42e5-bbf3-64847642ade6" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1010.166131] env[61852]: DEBUG oslo_concurrency.lockutils [req-b399d61e-43e8-48cd-86c6-150a060661dc req-7542c5c1-8b8c-467f-8d32-fd12cf271cb0 service nova] Acquired lock "refresh_cache-9db95089-9fd7-42e5-bbf3-64847642ade6" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.166296] env[61852]: DEBUG nova.network.neutron [req-b399d61e-43e8-48cd-86c6-150a060661dc req-7542c5c1-8b8c-467f-8d32-fd12cf271cb0 service nova] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Refreshing network info cache for port 0846a1e1-631a-4825-96c0-abda4b16c822 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1010.169324] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Created folder: Project (68498fcf21ac47abab34e5809137c1aa) in parent group-v277280. [ 1010.169512] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Creating folder: Instances. Parent ref: group-v277413. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1010.169755] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3b5cc888-a5a8-4970-99fd-6f5bf412b43b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.179339] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Created folder: Instances in parent group-v277413. [ 1010.179507] env[61852]: DEBUG oslo.service.loopingcall [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1010.179723] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1010.179938] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-014be51b-3831-4eb8-aa54-e4e1a8131f39 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.201151] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1010.201151] env[61852]: value = "task-1293317" [ 1010.201151] env[61852]: _type = "Task" [ 1010.201151] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.209214] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293317, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.408873] env[61852]: DEBUG oslo_vmware.api [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293310, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.422402] env[61852]: DEBUG oslo_vmware.api [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5207ecaa-89bc-0b72-9153-f4a93b638b73, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.568384] env[61852]: DEBUG nova.network.neutron [-] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.711534] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293317, 'name': CreateVM_Task, 'duration_secs': 0.373292} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.711732] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1010.712478] env[61852]: DEBUG oslo_concurrency.lockutils [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1010.899098] env[61852]: DEBUG nova.network.neutron [req-b399d61e-43e8-48cd-86c6-150a060661dc req-7542c5c1-8b8c-467f-8d32-fd12cf271cb0 service nova] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Updated VIF entry in instance network info cache for port 0846a1e1-631a-4825-96c0-abda4b16c822. 
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1010.899490] env[61852]: DEBUG nova.network.neutron [req-b399d61e-43e8-48cd-86c6-150a060661dc req-7542c5c1-8b8c-467f-8d32-fd12cf271cb0 service nova] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Updating instance_info_cache with network_info: [{"id": "0846a1e1-631a-4825-96c0-abda4b16c822", "address": "fa:16:3e:70:b5:11", "network": {"id": "6f31e9d1-aa8e-45d5-bc78-6ee8cc9f0b76", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-751119227-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68498fcf21ac47abab34e5809137c1aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "97113f46-d648-4613-b233-069acba18198", "external-id": "nsx-vlan-transportzone-480", "segmentation_id": 480, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0846a1e1-63", "ovs_interfaceid": "0846a1e1-631a-4825-96c0-abda4b16c822", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.909744] env[61852]: DEBUG oslo_vmware.api [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293310, 'name': ExtendVirtualDisk_Task, 'duration_secs': 3.750902} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.910106] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1010.910925] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4ea9363-8cef-4b1e-b22c-e5e4167c6985 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.932424] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 33667154-991d-4a32-8f16-f292a4725e3e/33667154-991d-4a32-8f16-f292a4725e3e.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1010.933393] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2bbff5b9-e51a-474a-adaf-7561fede824a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.952715] env[61852]: DEBUG oslo_vmware.api [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5207ecaa-89bc-0b72-9153-f4a93b638b73, 'name': SearchDatastore_Task, 'duration_secs': 2.18163} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.953350] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1010.953596] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1010.953831] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1010.953982] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.954181] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1010.954464] env[61852]: DEBUG oslo_concurrency.lockutils [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.954763] env[61852]: DEBUG oslo_concurrency.lockutils [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1010.954991] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-09dafc6d-96eb-4ff7-a691-6c0df977bb04 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.957553] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f283cd2f-14f6-4e6e-b1f5-86d94304d8d8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.959239] env[61852]: DEBUG oslo_vmware.api [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 
tempest-ServerShowV247Test-409016066-project-member] Waiting for the task: (returnval){ [ 1010.959239] env[61852]: value = "task-1293318" [ 1010.959239] env[61852]: _type = "Task" [ 1010.959239] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.963636] env[61852]: DEBUG oslo_vmware.api [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Waiting for the task: (returnval){ [ 1010.963636] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52ec78d4-5b34-415b-3ba6-8a740c20237b" [ 1010.963636] env[61852]: _type = "Task" [ 1010.963636] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.969576] env[61852]: DEBUG oslo_vmware.api [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293318, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.970787] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1010.970960] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1010.971635] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e11b21f7-a2cb-473c-a91a-61b13ea175ac {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.976026] env[61852]: DEBUG oslo_vmware.api [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52ec78d4-5b34-415b-3ba6-8a740c20237b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.979313] env[61852]: DEBUG oslo_vmware.api [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 1010.979313] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52238518-aebc-4b4a-05b6-222644a6e7fe" [ 1010.979313] env[61852]: _type = "Task" [ 1010.979313] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.990772] env[61852]: DEBUG oslo_vmware.api [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52238518-aebc-4b4a-05b6-222644a6e7fe, 'name': SearchDatastore_Task} progress is 0%. 
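[annotation] The "Waiting for the task: (returnval){ ... }" and "progress is N%" records come from oslo.vmware's task poller (wait_for_task / _poll_task in api.py, per the source locations logged). A simplified sketch of that polling contract, assuming a get_task_info callable that stands in for the PropertyCollector read; this is not the oslo.vmware implementation:

    import time

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
        """Block until a vCenter-style task leaves its running states.

        get_task_info() must return an object with .state in
        ('queued', 'running', 'success', 'error') and an integer .progress.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info.state == "success":
                return info                       # "completed successfully."
            if info.state == "error":
                raise RuntimeError("task failed")
            print("progress is %d%%" % info.progress)  # the DEBUG lines above
            time.sleep(poll_interval)
        raise TimeoutError("task did not complete within %.0fs" % timeout)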
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.071089] env[61852]: INFO nova.compute.manager [-] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Took 1.24 seconds to deallocate network for instance. [ 1011.405731] env[61852]: DEBUG oslo_concurrency.lockutils [req-b399d61e-43e8-48cd-86c6-150a060661dc req-7542c5c1-8b8c-467f-8d32-fd12cf271cb0 service nova] Releasing lock "refresh_cache-9db95089-9fd7-42e5-bbf3-64847642ade6" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1011.406097] env[61852]: DEBUG nova.compute.manager [req-b399d61e-43e8-48cd-86c6-150a060661dc req-7542c5c1-8b8c-467f-8d32-fd12cf271cb0 service nova] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Received event network-vif-deleted-d573484a-4782-4aef-8fe9-088ba601cde5 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1011.406287] env[61852]: INFO nova.compute.manager [req-b399d61e-43e8-48cd-86c6-150a060661dc req-7542c5c1-8b8c-467f-8d32-fd12cf271cb0 service nova] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Neutron deleted interface d573484a-4782-4aef-8fe9-088ba601cde5; detaching it from the instance and deleting it from the info cache [ 1011.406463] env[61852]: DEBUG nova.network.neutron [req-b399d61e-43e8-48cd-86c6-150a060661dc req-7542c5c1-8b8c-467f-8d32-fd12cf271cb0 service nova] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.470432] env[61852]: DEBUG oslo_vmware.api [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293318, 'name': ReconfigVM_Task, 'duration_secs': 0.424035} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.471016] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 33667154-991d-4a32-8f16-f292a4725e3e/33667154-991d-4a32-8f16-f292a4725e3e.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1011.471600] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0c4cbd66-3c43-4ba1-95e9-ebe6625a5e18 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.476243] env[61852]: DEBUG oslo_vmware.api [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52ec78d4-5b34-415b-3ba6-8a740c20237b, 'name': SearchDatastore_Task, 'duration_secs': 0.023101} completed successfully. 
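[annotation] The req-b399d61e records above show the tolerant path for a network-vif-deleted event: the port is dropped from the info cache first, and the failure to detach it from the hypervisor (instance 76fa1b27-bd1f-4794-a56b-88373e79db9a is already gone) is logged and swallowed. A sketch of that shape, with hypothetical stand-ins for the cache and the hypervisor call:

    class InstanceNotFound(Exception):
        pass

    def detach_interface(instance_uuid, port_id):
        # stand-in for the hypervisor call; here the instance is already deleted
        raise InstanceNotFound(instance_uuid)

    def process_vif_deleted(info_cache, instance_uuid, port_id):
        # drop the port from the cached VIF list first ...
        info_cache[instance_uuid] = [v for v in info_cache.get(instance_uuid, [])
                                     if v["id"] != port_id]
        try:
            detach_interface(instance_uuid, port_id)
        except InstanceNotFound:
            # ... and treat "instance could not be found" as a no-op,
            # mirroring the "Detach interface failed" DEBUG record above
            pass

    cache = {"76fa1b27-bd1f-4794-a56b-88373e79db9a":
             [{"id": "d573484a-4782-4aef-8fe9-088ba601cde5"}]}
    process_vif_deleted(cache, "76fa1b27-bd1f-4794-a56b-88373e79db9a",
                        "d573484a-4782-4aef-8fe9-088ba601cde5")
    print(cache)  # VIF list now [] -- matches "network_info: []" above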
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.476879] env[61852]: DEBUG oslo_concurrency.lockutils [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1011.477020] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1011.477245] env[61852]: DEBUG oslo_concurrency.lockutils [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1011.481126] env[61852]: DEBUG oslo_vmware.api [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for the task: (returnval){ [ 1011.481126] env[61852]: value = "task-1293319" [ 1011.481126] env[61852]: _type = "Task" [ 1011.481126] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.491502] env[61852]: DEBUG oslo_vmware.api [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52238518-aebc-4b4a-05b6-222644a6e7fe, 'name': SearchDatastore_Task, 'duration_secs': 0.008998} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.494859] env[61852]: DEBUG oslo_vmware.api [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293319, 'name': Rename_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.495080] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9e2783c-be45-433d-ba22-57f31606e889 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.499605] env[61852]: DEBUG oslo_vmware.api [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 1011.499605] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52c4a14e-3e6b-2e1a-f68c-8a1cacc1a5de" [ 1011.499605] env[61852]: _type = "Task" [ 1011.499605] env[61852]: } to complete. 
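[annotation] The Acquiring/Acquired/Releasing records around the cached image use oslo.concurrency's lockutils with the datastore path of the cached VMDK as the lock name, so concurrent builds from the same image serialize on it. A minimal sketch of that pattern (in-process lock; the fetch body is a placeholder, not Nova's _fetch_image_if_missing):

    from oslo_concurrency import lockutils

    CACHE_VMDK = ("[datastore1] devstack-image-cache_base/"
                  "90fd8f39-16b3-43e0-a682-0ec131005e31/"
                  "90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk")

    def fetch_image_if_missing():
        # every builder using image 90fd8f39-... funnels through this lock,
        # so the cache is populated at most once per image
        with lockutils.lock(CACHE_VMDK):
            pass  # check the cache dir; copy the VMDK in only if it is absent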
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.507034] env[61852]: DEBUG oslo_vmware.api [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52c4a14e-3e6b-2e1a-f68c-8a1cacc1a5de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.577979] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.578347] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.578603] env[61852]: DEBUG nova.objects.instance [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lazy-loading 'resources' on Instance uuid 76fa1b27-bd1f-4794-a56b-88373e79db9a {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1011.909143] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8e349dc6-54fd-41d0-abe4-1bd37176600c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.918607] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c56dc852-ee9a-4405-99a1-93872f3652a3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.946601] env[61852]: DEBUG nova.compute.manager [req-b399d61e-43e8-48cd-86c6-150a060661dc req-7542c5c1-8b8c-467f-8d32-fd12cf271cb0 service nova] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Detach interface failed, port_id=d573484a-4782-4aef-8fe9-088ba601cde5, reason: Instance 76fa1b27-bd1f-4794-a56b-88373e79db9a could not be found. {{(pid=61852) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1011.993601] env[61852]: DEBUG oslo_vmware.api [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293319, 'name': Rename_Task, 'duration_secs': 0.149766} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.993838] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1011.994097] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f515a18c-daf5-45f4-abf8-b61a8d7300dd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.999815] env[61852]: DEBUG oslo_vmware.api [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for the task: (returnval){ [ 1011.999815] env[61852]: value = "task-1293320" [ 1011.999815] env[61852]: _type = "Task" [ 1011.999815] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.009547] env[61852]: DEBUG oslo_vmware.api [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293320, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.012979] env[61852]: DEBUG oslo_vmware.api [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52c4a14e-3e6b-2e1a-f68c-8a1cacc1a5de, 'name': SearchDatastore_Task, 'duration_secs': 0.014047} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.013254] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1012.013504] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 8d6dc967-ebe5-4573-b41a-5793f96b7eec/8d6dc967-ebe5-4573-b41a-5793f96b7eec.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1012.013772] env[61852]: DEBUG oslo_concurrency.lockutils [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.013962] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1012.014191] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e4d120db-cf1f-4977-90c4-c04cffd7d90a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.015874] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a0f2c8e5-9eb1-44d2-be3c-2a8e98083fd7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.022358] env[61852]: DEBUG oslo_vmware.api [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 1012.022358] env[61852]: value = "task-1293321" [ 1012.022358] env[61852]: _type = "Task" [ 1012.022358] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.026498] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1012.026680] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1012.027775] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b699af62-d239-4351-b6cb-e79578cc8002 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.032827] env[61852]: DEBUG oslo_vmware.api [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293321, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.035686] env[61852]: DEBUG oslo_vmware.api [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Waiting for the task: (returnval){ [ 1012.035686] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52904b82-f615-d24d-5ad9-516566c898a2" [ 1012.035686] env[61852]: _type = "Task" [ 1012.035686] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.043225] env[61852]: DEBUG oslo_vmware.api [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52904b82-f615-d24d-5ad9-516566c898a2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.165698] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aed0f84-7ca7-4fd0-b93d-8c4bf28556a9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.173817] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d014d68-6fb3-4fca-9b2e-5ad40d4a0984 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.205661] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49da4dd8-52dc-4587-905c-7b943dd7bebd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.214084] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1442a97-bf2c-4d15-b19b-26557cc9fad0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.229765] env[61852]: DEBUG nova.compute.provider_tree [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1012.509688] env[61852]: DEBUG oslo_vmware.api [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293320, 'name': PowerOnVM_Task, 'duration_secs': 0.451499} completed successfully. 
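[annotation] Taken together, the task completions for instance 33667154-991d-4a32-8f16-f292a4725e3e give a rough phase timing for the spawn; the figures below are copied from the bracketed timestamps in this excerpt:

    # seconds-since-boot timestamps of the "completed successfully" records
    events = [
        ("ExtendVirtualDisk_Task", 1010.909744),
        ("ReconfigVM_Task",        1011.470432),
        ("Rename_Task",            1011.993601),
        ("PowerOnVM_Task",         1012.509688),
    ]
    for (prev_name, prev_t), (name, t) in zip(events, events[1:]):
        print("%s -> %s: %.3fs" % (prev_name, name, t - prev_t))
    # gaps: 0.561s, 0.523s, 0.516s -- the tail of the 8.78s spawn reported just below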
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.509993] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1012.510204] env[61852]: INFO nova.compute.manager [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Took 8.78 seconds to spawn the instance on the hypervisor. [ 1012.510388] env[61852]: DEBUG nova.compute.manager [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1012.511215] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27f390ba-eab6-4e38-a454-2148ba88e6f3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.530951] env[61852]: DEBUG oslo_vmware.api [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293321, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.546055] env[61852]: DEBUG oslo_vmware.api [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52904b82-f615-d24d-5ad9-516566c898a2, 'name': SearchDatastore_Task, 'duration_secs': 0.007992} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.546873] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07cd6253-56f4-4ea4-81d8-dc01a3d48090 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.552103] env[61852]: DEBUG oslo_vmware.api [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Waiting for the task: (returnval){ [ 1012.552103] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5214ee6a-201f-141c-7933-99d9bb6362a5" [ 1012.552103] env[61852]: _type = "Task" [ 1012.552103] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.559360] env[61852]: DEBUG oslo_vmware.api [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5214ee6a-201f-141c-7933-99d9bb6362a5, 'name': SearchDatastore_Task} progress is 0%. 
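[annotation] Each instance in this excerpt goes through the same ordered set of vCenter operations. A compressed sketch of that order, where session.call is a hypothetical stand-in for the invoke_api + wait_for_task pair, not a real oslo.vmware method:

    def spawn_root_disk(session, image_id, uuid, ds="datastore1"):
        cache = "[%s] devstack-image-cache_base/%s/%s.vmdk" % (ds, image_id, image_id)
        root = "[%s] %s/%s.vmdk" % (ds, uuid, uuid)
        session.call("SearchDatastore_Task", cache)   # is the image cached yet?
        session.call("CopyVirtualDisk_Task", cache, root)
        session.call("ExtendVirtualDisk_Task", root)  # grow to the flavor's root_gb
        session.call("ReconfigVM_Task", root)         # attach the disk to the VM
        session.call("Rename_Task", uuid)             # set the display name
        session.call("PowerOnVM_Task", uuid)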
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.732877] env[61852]: DEBUG nova.scheduler.client.report [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1013.029870] env[61852]: INFO nova.compute.manager [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Took 19.56 seconds to build instance. [ 1013.035458] env[61852]: DEBUG oslo_vmware.api [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293321, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513901} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.035458] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 8d6dc967-ebe5-4573-b41a-5793f96b7eec/8d6dc967-ebe5-4573-b41a-5793f96b7eec.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1013.035458] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1013.035458] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4fe96fb9-71a0-418b-bcbf-4beb6b6e45d3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.041995] env[61852]: DEBUG oslo_vmware.api [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 1013.041995] env[61852]: value = "task-1293322" [ 1013.041995] env[61852]: _type = "Task" [ 1013.041995] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.052382] env[61852]: DEBUG oslo_vmware.api [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293322, 'name': ExtendVirtualDisk_Task} progress is 0%. 
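[annotation] The inventory record above is what gets reported to placement; the capacity the scheduler can pack onto the node follows the usual placement rule (total - reserved) * allocation_ratio. Worked out for the figures logged:

    inventory = {   # copied from the report above
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }
    for rc, inv in inventory.items():
        cap = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(rc, cap)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0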
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.062258] env[61852]: DEBUG oslo_vmware.api [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5214ee6a-201f-141c-7933-99d9bb6362a5, 'name': SearchDatastore_Task, 'duration_secs': 0.011231} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.062525] env[61852]: DEBUG oslo_concurrency.lockutils [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1013.062782] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 9db95089-9fd7-42e5-bbf3-64847642ade6/9db95089-9fd7-42e5-bbf3-64847642ade6.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1013.063045] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a19beaca-6900-426e-bd48-2af822c63045 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.069694] env[61852]: DEBUG oslo_vmware.api [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Waiting for the task: (returnval){ [ 1013.069694] env[61852]: value = "task-1293323" [ 1013.069694] env[61852]: _type = "Task" [ 1013.069694] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.078367] env[61852]: DEBUG oslo_vmware.api [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Task: {'id': task-1293323, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.238295] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.660s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1013.266642] env[61852]: INFO nova.scheduler.client.report [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Deleted allocations for instance 76fa1b27-bd1f-4794-a56b-88373e79db9a [ 1013.532248] env[61852]: DEBUG oslo_concurrency.lockutils [None req-8916ba2c-cf41-4dfe-8147-1095761eed52 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Lock "33667154-991d-4a32-8f16-f292a4725e3e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.073s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1013.557028] env[61852]: DEBUG oslo_vmware.api [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293322, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.09448} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.557028] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1013.557028] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55f66b60-266b-4b9d-8e86-e572c6a2ad10 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.582040] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] 8d6dc967-ebe5-4573-b41a-5793f96b7eec/8d6dc967-ebe5-4573-b41a-5793f96b7eec.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1013.586557] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e616815-2211-4e17-b7f0-29965fbd0e56 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.607360] env[61852]: DEBUG oslo_vmware.api [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Task: {'id': task-1293323, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.524313} completed successfully. 
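[annotation] The "waited N s" / "held N s" figures on lock records (0.001s waited and 1.660s held for compute_resources; 21.073s held for the build lock) come from lockutils' wrapper timing both the acquire and the critical section. A sketch of that bookkeeping with a plain threading lock, not the oslo code:

    import time
    import threading
    from contextlib import contextmanager

    @contextmanager
    def timed_lock(lock, name):
        t0 = time.monotonic()
        with lock:
            print('Lock "%s" acquired :: waited %.3fs'
                  % (name, time.monotonic() - t0))
            t1 = time.monotonic()
            try:
                yield
            finally:
                print('Lock "%s" "released" :: held %.3fs'
                      % (name, time.monotonic() - t1))

    with timed_lock(threading.Lock(), "compute_resources"):
        pass  # resource-tracker style critical section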
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.608687] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 9db95089-9fd7-42e5-bbf3-64847642ade6/9db95089-9fd7-42e5-bbf3-64847642ade6.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1013.609072] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1013.609457] env[61852]: DEBUG oslo_vmware.api [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 1013.609457] env[61852]: value = "task-1293324" [ 1013.609457] env[61852]: _type = "Task" [ 1013.609457] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.609663] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c73b9184-dddb-4a78-9ccf-fd0a30f6f589 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.624151] env[61852]: DEBUG oslo_vmware.api [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Waiting for the task: (returnval){ [ 1013.624151] env[61852]: value = "task-1293325" [ 1013.624151] env[61852]: _type = "Task" [ 1013.624151] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.626587] env[61852]: DEBUG oslo_vmware.api [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293324, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.639087] env[61852]: DEBUG oslo_vmware.api [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Task: {'id': task-1293325, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.775208] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9bfe8a5a-9f39-43dd-808a-4e98c5e846ab tempest-ServerDiskConfigTestJSON-1665070456 tempest-ServerDiskConfigTestJSON-1665070456-project-member] Lock "76fa1b27-bd1f-4794-a56b-88373e79db9a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.085s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.122994] env[61852]: INFO nova.compute.manager [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Rebuilding instance [ 1014.125847] env[61852]: DEBUG oslo_vmware.api [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293324, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.136388] env[61852]: DEBUG oslo_vmware.api [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Task: {'id': task-1293325, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064559} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.136684] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1014.137755] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e496ac5a-5881-4fd4-aa6d-d80225325016 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.169433] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 9db95089-9fd7-42e5-bbf3-64847642ade6/9db95089-9fd7-42e5-bbf3-64847642ade6.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1014.173983] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-86cc6390-783b-4799-87a2-1aee9e0844eb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.197108] env[61852]: DEBUG oslo_vmware.api [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Waiting for the task: (returnval){ [ 1014.197108] env[61852]: value = "task-1293326" [ 1014.197108] env[61852]: _type = "Task" [ 1014.197108] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.207485] env[61852]: DEBUG oslo_vmware.api [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Task: {'id': task-1293326, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.217527] env[61852]: DEBUG nova.compute.manager [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1014.218430] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f26ef25-6ae3-4aa9-9e7c-718bf8e78622 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.623668] env[61852]: DEBUG oslo_vmware.api [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293324, 'name': ReconfigVM_Task, 'duration_secs': 0.909676} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.623999] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Reconfigured VM instance instance-00000062 to attach disk [datastore1] 8d6dc967-ebe5-4573-b41a-5793f96b7eec/8d6dc967-ebe5-4573-b41a-5793f96b7eec.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1014.624650] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1d943997-cd38-4bca-9113-5a0c0e35ee6a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.630823] env[61852]: DEBUG oslo_vmware.api [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 1014.630823] env[61852]: value = "task-1293327" [ 1014.630823] env[61852]: _type = "Task" [ 1014.630823] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.640831] env[61852]: DEBUG oslo_vmware.api [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293327, 'name': Rename_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.705719] env[61852]: DEBUG oslo_vmware.api [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Task: {'id': task-1293326, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.730194] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1014.730868] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d0f4caee-800c-47bc-88ab-820d4c39924a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.739835] env[61852]: DEBUG oslo_vmware.api [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for the task: (returnval){ [ 1014.739835] env[61852]: value = "task-1293328" [ 1014.739835] env[61852]: _type = "Task" [ 1014.739835] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.749466] env[61852]: DEBUG oslo_vmware.api [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293328, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.141514] env[61852]: DEBUG oslo_vmware.api [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293327, 'name': Rename_Task, 'duration_secs': 0.175577} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.141826] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1015.142480] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ccf4f3a9-3601-40a8-8a97-639ad67e9880 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.147871] env[61852]: DEBUG oslo_vmware.api [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 1015.147871] env[61852]: value = "task-1293332" [ 1015.147871] env[61852]: _type = "Task" [ 1015.147871] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.156760] env[61852]: DEBUG oslo_vmware.api [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293332, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.206074] env[61852]: DEBUG oslo_vmware.api [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Task: {'id': task-1293326, 'name': ReconfigVM_Task, 'duration_secs': 0.519339} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.208585] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 9db95089-9fd7-42e5-bbf3-64847642ade6/9db95089-9fd7-42e5-bbf3-64847642ade6.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1015.209639] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9ad12450-8ea4-4eb0-92f3-8d25a4d970cb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.216129] env[61852]: DEBUG oslo_vmware.api [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Waiting for the task: (returnval){ [ 1015.216129] env[61852]: value = "task-1293333" [ 1015.216129] env[61852]: _type = "Task" [ 1015.216129] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.225153] env[61852]: DEBUG oslo_vmware.api [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Task: {'id': task-1293333, 'name': Rename_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.248774] env[61852]: DEBUG oslo_vmware.api [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293328, 'name': PowerOffVM_Task, 'duration_secs': 0.113268} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.249872] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1015.250080] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1015.250892] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cedb4c3c-cc2c-4e86-8481-dead112b6f7b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.257920] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1015.258182] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b08db9ff-403a-409c-a28f-7bd650457d0d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.278888] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1015.279164] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1015.279462] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Deleting the datastore file [datastore1] 33667154-991d-4a32-8f16-f292a4725e3e {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1015.279752] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7fbc392c-b689-408b-97ec-804f02434638 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.286192] env[61852]: DEBUG oslo_vmware.api [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for the task: (returnval){ [ 1015.286192] env[61852]: value = "task-1293335" [ 1015.286192] env[61852]: _type = "Task" [ 1015.286192] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.294583] env[61852]: DEBUG oslo_vmware.api [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293335, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.658432] env[61852]: DEBUG oslo_vmware.api [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293332, 'name': PowerOnVM_Task, 'duration_secs': 0.467542} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.658432] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1015.658432] env[61852]: INFO nova.compute.manager [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Took 9.77 seconds to spawn the instance on the hypervisor. [ 1015.658726] env[61852]: DEBUG nova.compute.manager [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1015.659341] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8b86c30-569b-47a4-b7d1-7f4753624433 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.726291] env[61852]: DEBUG oslo_vmware.api [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Task: {'id': task-1293333, 'name': Rename_Task, 'duration_secs': 0.148311} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.726598] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1015.726909] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-76202fdf-bc7d-4a9d-a79d-053116a46aae {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.734472] env[61852]: DEBUG oslo_vmware.api [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Waiting for the task: (returnval){ [ 1015.734472] env[61852]: value = "task-1293336" [ 1015.734472] env[61852]: _type = "Task" [ 1015.734472] env[61852]: } to complete. 
[ 1015.746361] env[61852]: DEBUG oslo_vmware.api [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Task: {'id': task-1293336, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1015.795833] env[61852]: DEBUG oslo_vmware.api [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293335, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.09248} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1015.796160] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1015.796308] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 1015.796804] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1016.180276] env[61852]: INFO nova.compute.manager [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Took 22.27 seconds to build instance.
[ 1016.244502] env[61852]: DEBUG oslo_vmware.api [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Task: {'id': task-1293336, 'name': PowerOnVM_Task} progress is 71%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1016.683131] env[61852]: DEBUG oslo_concurrency.lockutils [None req-9206c5fb-29da-4097-97ee-4fb61c23d031 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "8d6dc967-ebe5-4573-b41a-5793f96b7eec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.779s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1016.744486] env[61852]: DEBUG oslo_vmware.api [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Task: {'id': task-1293336, 'name': PowerOnVM_Task, 'duration_secs': 0.98861} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1016.744764] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 1016.744973] env[61852]: INFO nova.compute.manager [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Took 8.68 seconds to spawn the instance on the hypervisor.
[ 1016.745174] env[61852]: DEBUG nova.compute.manager [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 1016.745924] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63eadd92-30d5-47ee-9f60-49ee5a4c7a74 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1016.834392] env[61852]: DEBUG nova.virt.hardware [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1016.834691] env[61852]: DEBUG nova.virt.hardware [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1016.834887] env[61852]: DEBUG nova.virt.hardware [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1016.835091] env[61852]: DEBUG nova.virt.hardware [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1016.835265] env[61852]: DEBUG nova.virt.hardware [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1016.835429] env[61852]: DEBUG nova.virt.hardware [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1016.835661] env[61852]: DEBUG nova.virt.hardware [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1016.835864] env[61852]: DEBUG nova.virt.hardware [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1016.836067] env[61852]: DEBUG nova.virt.hardware [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1016.836067] env[61852]: DEBUG nova.virt.hardware [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1016.836067] env[61852]: DEBUG nova.virt.hardware [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
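[Editor's note] The hardware.py walk above (limits, preferences, possible topologies) boils down to enumerating the (sockets, cores, threads) factorisations of the vCPU count that fit the limits; for 1 vCPU only 1:1:1 survives, which is exactly what the log reports. A simplified, self-contained illustration (not nova's actual implementation):

```python
# Simplified sketch of the topology enumeration logged above: list every
# (sockets, cores, threads) triple whose product equals the vCPU count and
# that fits within the per-dimension limits.
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    found.append(VirtCPUTopology(sockets, cores, threads))
    return found

print(possible_topologies(1))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)]
```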
[ 1016.837615] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5408c68-7203-49b3-84a1-afe4b7b22063 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1016.846106] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f6d14f9-426b-4a4a-8897-a74ab633427c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1016.860660] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Instance VIF info [] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1016.866712] env[61852]: DEBUG oslo.service.loopingcall [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1016.866999] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 1016.867209] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-190b6efc-3982-49bd-bed5-0550ecb26a90 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1016.883674] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1016.883674] env[61852]: value = "task-1293337"
[ 1016.883674] env[61852]: _type = "Task"
[ 1016.883674] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1016.891589] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293337, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1017.262919] env[61852]: INFO nova.compute.manager [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Took 18.54 seconds to build instance.
[ 1017.394090] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293337, 'name': CreateVM_Task, 'duration_secs': 0.283181} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1017.394268] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 1017.394699] env[61852]: DEBUG oslo_concurrency.lockutils [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1017.394879] env[61852]: DEBUG oslo_concurrency.lockutils [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1017.395239] env[61852]: DEBUG oslo_concurrency.lockutils [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1017.395499] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67035f55-6a43-44cf-86e7-01ced46970a0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1017.400209] env[61852]: DEBUG oslo_vmware.api [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for the task: (returnval){
[ 1017.400209] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52b5bc04-bc8b-26f3-8b42-59a91c07b6b3"
[ 1017.400209] env[61852]: _type = "Task"
[ 1017.400209] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
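[Editor's note] The "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" line above comes from oslo.service's looping-call machinery. A minimal sketch of that pattern, with a made-up retry budget purely for illustration:

```python
# Sketch of the oslo.service looping-call pattern: run a callable at a fixed
# interval until it signals completion by raising LoopingCallDone.
from oslo_service import loopingcall

attempts = {'n': 0}

def _poll_until_done():
    attempts['n'] += 1
    if attempts['n'] >= 3:               # pretend the work finished here
        raise loopingcall.LoopingCallDone(retvalue='done')

timer = loopingcall.FixedIntervalLoopingCall(_poll_until_done)
result = timer.start(interval=0.1).wait()  # blocks until LoopingCallDone
print(result)                              # 'done'
```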
[ 1017.408584] env[61852]: DEBUG oslo_vmware.api [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52b5bc04-bc8b-26f3-8b42-59a91c07b6b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1017.467288] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0f2985e4-a491-4e68-aa98-7dd22a730845 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "8d6dc967-ebe5-4573-b41a-5793f96b7eec" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1017.467564] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0f2985e4-a491-4e68-aa98-7dd22a730845 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "8d6dc967-ebe5-4573-b41a-5793f96b7eec" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1017.467755] env[61852]: DEBUG nova.compute.manager [None req-0f2985e4-a491-4e68-aa98-7dd22a730845 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 1017.468652] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eec54d7-57c7-4e12-b050-603f7b018e64 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1017.474848] env[61852]: DEBUG nova.compute.manager [None req-0f2985e4-a491-4e68-aa98-7dd22a730845 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61852) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}}
[ 1017.475422] env[61852]: DEBUG nova.objects.instance [None req-0f2985e4-a491-4e68-aa98-7dd22a730845 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lazy-loading 'flavor' on Instance uuid 8d6dc967-ebe5-4573-b41a-5793f96b7eec {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1017.765071] env[61852]: DEBUG oslo_concurrency.lockutils [None req-10a59915-40a3-4039-8a82-52fc91516136 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Lock "9db95089-9fd7-42e5-bbf3-64847642ade6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.058s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1017.914906] env[61852]: DEBUG oslo_vmware.api [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52b5bc04-bc8b-26f3-8b42-59a91c07b6b3, 'name': SearchDatastore_Task, 'duration_secs': 0.009258} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1017.914906] env[61852]: DEBUG oslo_concurrency.lockutils [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1017.914906] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1017.914906] env[61852]: DEBUG oslo_concurrency.lockutils [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1017.914906] env[61852]: DEBUG oslo_concurrency.lockutils [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1017.914906] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1017.914906] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4d13ff10-5451-48c5-8c94-a58a8db0425b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1017.923037] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1017.923037] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 1017.923762] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fba42e41-bb81-4c46-a8c4-6791dea1d6a8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1017.928960] env[61852]: DEBUG oslo_vmware.api [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for the task: (returnval){
[ 1017.928960] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52f6893b-022e-a139-e19c-168b6b49c389"
[ 1017.928960] env[61852]: _type = "Task"
[ 1017.928960] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1017.938775] env[61852]: DEBUG oslo_vmware.api [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52f6893b-022e-a139-e19c-168b6b49c389, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1017.981303] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f2985e4-a491-4e68-aa98-7dd22a730845 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 1017.981612] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-71102eca-691d-4366-b8ff-b034d9010e61 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1017.988076] env[61852]: DEBUG oslo_vmware.api [None req-0f2985e4-a491-4e68-aa98-7dd22a730845 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){
[ 1017.988076] env[61852]: value = "task-1293339"
[ 1017.988076] env[61852]: _type = "Task"
[ 1017.988076] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1017.998968] env[61852]: DEBUG oslo_vmware.api [None req-0f2985e4-a491-4e68-aa98-7dd22a730845 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293339, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1018.439136] env[61852]: DEBUG oslo_vmware.api [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52f6893b-022e-a139-e19c-168b6b49c389, 'name': SearchDatastore_Task, 'duration_secs': 0.013802} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1018.439950] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a0adc60-780f-473a-b358-9492d1fd61cb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1018.445129] env[61852]: DEBUG oslo_vmware.api [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for the task: (returnval){
[ 1018.445129] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52500051-73af-5bb7-5f82-7fd97c228ca6"
[ 1018.445129] env[61852]: _type = "Task"
[ 1018.445129] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1018.453901] env[61852]: DEBUG oslo_vmware.api [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52500051-73af-5bb7-5f82-7fd97c228ca6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1018.497678] env[61852]: DEBUG oslo_vmware.api [None req-0f2985e4-a491-4e68-aa98-7dd22a730845 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293339, 'name': PowerOffVM_Task, 'duration_secs': 0.419754} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1018.497962] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f2985e4-a491-4e68-aa98-7dd22a730845 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 1018.498220] env[61852]: DEBUG nova.compute.manager [None req-0f2985e4-a491-4e68-aa98-7dd22a730845 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 1018.499049] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac200b47-2710-4b52-9442-f682f022a905 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1018.885567] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Acquiring lock "9db95089-9fd7-42e5-bbf3-64847642ade6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1018.885867] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Lock "9db95089-9fd7-42e5-bbf3-64847642ade6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1018.886099] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Acquiring lock "9db95089-9fd7-42e5-bbf3-64847642ade6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1018.886330] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Lock "9db95089-9fd7-42e5-bbf3-64847642ade6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1018.886466] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Lock "9db95089-9fd7-42e5-bbf3-64847642ade6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1018.888709] env[61852]: INFO nova.compute.manager [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Terminating instance
[ 1018.890465] env[61852]: DEBUG nova.compute.manager [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
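[Editor's note] The "Acquiring lock / acquired / released :: held N s" lines above are oslo.concurrency's lockutils, which nova uses to serialize operations on one instance (lock names here mirror instance UUIDs from the log). A hedged sketch of the two usual forms:

```python
# Sketch of the oslo.concurrency locking pattern behind the lockutils lines.
# Lock names below mirror the log but are otherwise arbitrary strings.
from oslo_concurrency import lockutils

@lockutils.synchronized('9db95089-9fd7-42e5-bbf3-64847642ade6')
def do_terminate_instance():
    # Critical section: only one request may terminate this instance at a time.
    pass

# Equivalent explicit context-manager form, as used for shared resources:
with lockutils.lock('compute_resources'):
    pass  # e.g. resource-tracker claim or usage update
```

The "waited"/"held" durations in the log are the time spent blocking on acquisition and the time the critical section ran, respectively.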
[ 1018.890665] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 1018.891589] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a25f607a-e962-4208-af55-799ae6908248 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1018.901033] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 1018.901300] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8c880eb1-cfa5-4779-bd56-784abe073c1c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1018.907335] env[61852]: DEBUG oslo_vmware.api [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Waiting for the task: (returnval){
[ 1018.907335] env[61852]: value = "task-1293340"
[ 1018.907335] env[61852]: _type = "Task"
[ 1018.907335] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1018.915064] env[61852]: DEBUG oslo_vmware.api [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Task: {'id': task-1293340, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1018.956029] env[61852]: DEBUG oslo_vmware.api [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52500051-73af-5bb7-5f82-7fd97c228ca6, 'name': SearchDatastore_Task, 'duration_secs': 0.018193} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1018.956249] env[61852]: DEBUG oslo_concurrency.lockutils [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1018.956516] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 33667154-991d-4a32-8f16-f292a4725e3e/33667154-991d-4a32-8f16-f292a4725e3e.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 1018.956788] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e150ec90-bcf8-4ef0-87d3-c15af5b6352f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1018.963829] env[61852]: DEBUG oslo_vmware.api [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for the task: (returnval){
[ 1018.963829] env[61852]: value = "task-1293341"
[ 1018.963829] env[61852]: _type = "Task"
[ 1018.963829] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1018.973310] env[61852]: DEBUG oslo_vmware.api [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293341, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1019.012665] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0f2985e4-a491-4e68-aa98-7dd22a730845 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "8d6dc967-ebe5-4573-b41a-5793f96b7eec" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.545s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1019.420154] env[61852]: DEBUG oslo_vmware.api [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Task: {'id': task-1293340, 'name': PowerOffVM_Task, 'duration_secs': 0.177398} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1019.420496] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 1019.420680] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 1019.420954] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c1c5b987-7ff1-42dc-a47d-b714c337e0e2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1019.448684] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Acquiring lock "7601ebe5-ff7a-4bdf-b64a-a5b2de069bca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1019.449010] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Lock "7601ebe5-ff7a-4bdf-b64a-a5b2de069bca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1019.474414] env[61852]: DEBUG oslo_vmware.api [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293341, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1019.484419] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 1019.484695] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 1019.484886] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Deleting the datastore file [datastore1] 9db95089-9fd7-42e5-bbf3-64847642ade6 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1019.485570] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ffdf99ba-0760-4864-9d3b-4b0a8b358bce {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1019.492641] env[61852]: DEBUG oslo_vmware.api [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Waiting for the task: (returnval){
[ 1019.492641] env[61852]: value = "task-1293343"
[ 1019.492641] env[61852]: _type = "Task"
[ 1019.492641] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1019.501113] env[61852]: DEBUG oslo_vmware.api [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Task: {'id': task-1293343, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1019.951172] env[61852]: DEBUG nova.compute.manager [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 1019.974936] env[61852]: DEBUG oslo_vmware.api [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293341, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.569065} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1019.975235] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 33667154-991d-4a32-8f16-f292a4725e3e/33667154-991d-4a32-8f16-f292a4725e3e.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 1019.975454] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 1019.975711] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-32eaa669-cdc6-4df6-bff1-4b061b858a89 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1019.982393] env[61852]: DEBUG oslo_vmware.api [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for the task: (returnval){
[ 1019.982393] env[61852]: value = "task-1293345"
[ 1019.982393] env[61852]: _type = "Task"
[ 1019.982393] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1019.991289] env[61852]: DEBUG oslo_vmware.api [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293345, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1020.001648] env[61852]: DEBUG oslo_vmware.api [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Task: {'id': task-1293343, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.198862} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
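[Editor's note] The CopyVirtualDisk_Task sequence above copies the cached image vmdk into the instance directory through the vCenter VirtualDiskManager, then awaits it like any other task. A hedged, self-contained sketch of that step (the datacenter reference lookup is elided; the function name and parameters here are illustrative, not nova's own helper):

```python
# Sketch of the disk-copy step, assuming an established VMwareAPISession and
# a datacenter managed-object reference obtained elsewhere (hypothetical).
from oslo_vmware import api

def copy_image_to_instance(session: api.VMwareAPISession, dc_ref,
                           src_path: str, dst_path: str) -> None:
    """Copy a cached image vmdk to the instance directory and wait."""
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName=src_path, sourceDatacenter=dc_ref,
                              destName=dst_path, destDatacenter=dc_ref)
    session.wait_for_task(task)

# Paths mirroring the log entries above:
# copy_image_to_instance(
#     session, dc_ref,
#     '[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/'
#     '90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk',
#     '[datastore1] 33667154-991d-4a32-8f16-f292a4725e3e/'
#     '33667154-991d-4a32-8f16-f292a4725e3e.vmdk')
```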
[ 1020.001911] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1020.002116] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 1020.002301] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1020.002523] env[61852]: INFO nova.compute.manager [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Took 1.11 seconds to destroy the instance on the hypervisor.
[ 1020.002819] env[61852]: DEBUG oslo.service.loopingcall [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1020.003028] env[61852]: DEBUG nova.compute.manager [-] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 1020.003131] env[61852]: DEBUG nova.network.neutron [-] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1020.261455] env[61852]: DEBUG nova.compute.manager [req-ee6c4a06-476a-40e5-8474-070eeac57e9b req-ef067e1c-3f54-4372-8e3f-8701ef97dfa5 service nova] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Received event network-vif-deleted-0846a1e1-631a-4825-96c0-abda4b16c822 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1020.261666] env[61852]: INFO nova.compute.manager [req-ee6c4a06-476a-40e5-8474-070eeac57e9b req-ef067e1c-3f54-4372-8e3f-8701ef97dfa5 service nova] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Neutron deleted interface 0846a1e1-631a-4825-96c0-abda4b16c822; detaching it from the instance and deleting it from the info cache
[ 1020.261837] env[61852]: DEBUG nova.network.neutron [req-ee6c4a06-476a-40e5-8474-070eeac57e9b req-ef067e1c-3f54-4372-8e3f-8701ef97dfa5 service nova] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1020.403195] env[61852]: DEBUG oslo_concurrency.lockutils [None req-40dcc070-5cca-438e-8431-39271fb963b7 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "8d6dc967-ebe5-4573-b41a-5793f96b7eec" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1020.403700] env[61852]: DEBUG oslo_concurrency.lockutils [None req-40dcc070-5cca-438e-8431-39271fb963b7 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "8d6dc967-ebe5-4573-b41a-5793f96b7eec" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1020.403941] env[61852]: DEBUG oslo_concurrency.lockutils [None req-40dcc070-5cca-438e-8431-39271fb963b7 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "8d6dc967-ebe5-4573-b41a-5793f96b7eec-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1020.404168] env[61852]: DEBUG oslo_concurrency.lockutils [None req-40dcc070-5cca-438e-8431-39271fb963b7 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "8d6dc967-ebe5-4573-b41a-5793f96b7eec-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1020.404351] env[61852]: DEBUG oslo_concurrency.lockutils [None req-40dcc070-5cca-438e-8431-39271fb963b7 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "8d6dc967-ebe5-4573-b41a-5793f96b7eec-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1020.406339] env[61852]: INFO nova.compute.manager [None req-40dcc070-5cca-438e-8431-39271fb963b7 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Terminating instance
[ 1020.408413] env[61852]: DEBUG nova.compute.manager [None req-40dcc070-5cca-438e-8431-39271fb963b7 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 1020.408691] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-40dcc070-5cca-438e-8431-39271fb963b7 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 1020.409550] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d36023f-ecca-41c7-8fb6-565254ead421 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1020.417951] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-40dcc070-5cca-438e-8431-39271fb963b7 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 1020.418223] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-398f3372-3bed-4b51-a47e-0a815bde5b55 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1020.473535] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1020.473801] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1020.475319] env[61852]: INFO nova.compute.claims [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1020.491754] env[61852]: DEBUG oslo_vmware.api [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293345, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066401} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1020.492017] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 1020.492762] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71e35e22-1215-44cd-a313-4c8976b04550 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1020.513069] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 33667154-991d-4a32-8f16-f292a4725e3e/33667154-991d-4a32-8f16-f292a4725e3e.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 1020.513348] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-20f7ff77-a447-4501-abc4-ac56b667ac2c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1020.532341] env[61852]: DEBUG oslo_vmware.api [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for the task: (returnval){
[ 1020.532341] env[61852]: value = "task-1293347"
[ 1020.532341] env[61852]: _type = "Task"
[ 1020.532341] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1020.539786] env[61852]: DEBUG oslo_vmware.api [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293347, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1020.738356] env[61852]: DEBUG nova.network.neutron [-] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1020.765618] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-71f1dc5b-cae6-4241-8f01-48657fe48083 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1020.774964] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a74cc9fb-f240-4eb8-b121-15434f0c7732 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1020.798811] env[61852]: DEBUG nova.compute.manager [req-ee6c4a06-476a-40e5-8474-070eeac57e9b req-ef067e1c-3f54-4372-8e3f-8701ef97dfa5 service nova] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Detach interface failed, port_id=0846a1e1-631a-4825-96c0-abda4b16c822, reason: Instance 9db95089-9fd7-42e5-bbf3-64847642ade6 could not be found. {{(pid=61852) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}}
[ 1021.042476] env[61852]: DEBUG oslo_vmware.api [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293347, 'name': ReconfigVM_Task, 'duration_secs': 0.320218} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1021.042759] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 33667154-991d-4a32-8f16-f292a4725e3e/33667154-991d-4a32-8f16-f292a4725e3e.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 1021.043384] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-30a87532-f518-4c2d-9cc9-09c622c64390 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1021.049058] env[61852]: DEBUG oslo_vmware.api [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for the task: (returnval){
[ 1021.049058] env[61852]: value = "task-1293348"
[ 1021.049058] env[61852]: _type = "Task"
[ 1021.049058] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1021.056571] env[61852]: DEBUG oslo_vmware.api [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293348, 'name': Rename_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1021.241327] env[61852]: INFO nova.compute.manager [-] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Took 1.24 seconds to deallocate network for instance.
[ 1021.547294] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af78c7de-b533-4c62-9ac4-484a47ca8adc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1021.557562] env[61852]: DEBUG oslo_vmware.api [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293348, 'name': Rename_Task, 'duration_secs': 0.132585} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1021.559284] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 1021.559556] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-11b0b215-ea48-4dca-8a67-4845c9be8d4b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1021.561568] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fce62d60-4192-49e9-9e82-12867504632f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1021.592499] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-313b99b0-77b2-4d26-ab6c-fbf239628b31 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1021.594947] env[61852]: DEBUG oslo_vmware.api [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for the task: (returnval){
[ 1021.594947] env[61852]: value = "task-1293349"
[ 1021.594947] env[61852]: _type = "Task"
[ 1021.594947] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1021.601172] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc872554-5828-4767-9780-51d0217f961e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1021.607742] env[61852]: DEBUG oslo_vmware.api [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293349, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1021.617428] env[61852]: DEBUG nova.compute.provider_tree [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 1021.748349] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1022.105672] env[61852]: DEBUG oslo_vmware.api [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293349, 'name': PowerOnVM_Task, 'duration_secs': 0.409535} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1022.105985] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 1022.106235] env[61852]: DEBUG nova.compute.manager [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 1022.107122] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa3d211-0a67-47a2-aa85-4769bdbccde9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1022.138017] env[61852]: ERROR nova.scheduler.client.report [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [req-df2c7103-7722-4552-ab35-f2509ce0224d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f818062c-7b17-4bd0-94af-192a674543c3.
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-df2c7103-7722-4552-ab35-f2509ce0224d"}]} [ 1022.157352] env[61852]: DEBUG nova.scheduler.client.report [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Refreshing inventories for resource provider f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1022.171553] env[61852]: DEBUG nova.scheduler.client.report [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Updating ProviderTree inventory for provider f818062c-7b17-4bd0-94af-192a674543c3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1022.171654] env[61852]: DEBUG nova.compute.provider_tree [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1022.183218] env[61852]: DEBUG nova.scheduler.client.report [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Refreshing aggregate associations for resource provider f818062c-7b17-4bd0-94af-192a674543c3, aggregates: None {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1022.202912] env[61852]: DEBUG nova.scheduler.client.report [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Refreshing trait associations for resource provider f818062c-7b17-4bd0-94af-192a674543c3, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61852) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1022.271017] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6e3b0e6-3993-4cc1-952d-10c3e913457c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.276986] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c1391026-a137-4a87-813d-9ef003e4dae4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.309883] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45a588e8-858d-4a55-b360-0ff80aa3b360 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.317905] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7756c0bf-0ee9-435f-8fdd-9e8e10eb1edc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.331047] env[61852]: DEBUG nova.compute.provider_tree [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1022.624159] env[61852]: DEBUG oslo_concurrency.lockutils [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.842377] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-40dcc070-5cca-438e-8431-39271fb963b7 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1022.842613] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-40dcc070-5cca-438e-8431-39271fb963b7 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1022.842807] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-40dcc070-5cca-438e-8431-39271fb963b7 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Deleting the datastore file [datastore1] 8d6dc967-ebe5-4573-b41a-5793f96b7eec {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1022.843087] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cb968b8e-6dbe-45a2-af8c-e914ea08ea7f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.848527] env[61852]: DEBUG oslo_vmware.api [None req-40dcc070-5cca-438e-8431-39271fb963b7 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for the task: (returnval){ [ 1022.848527] env[61852]: value = "task-1293351" [ 1022.848527] env[61852]: _type = "Task" [ 1022.848527] env[61852]: } to 
complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.856075] env[61852]: DEBUG oslo_vmware.api [None req-40dcc070-5cca-438e-8431-39271fb963b7 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293351, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.862043] env[61852]: DEBUG nova.scheduler.client.report [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Updated inventory for provider f818062c-7b17-4bd0-94af-192a674543c3 with generation 130 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1022.862174] env[61852]: DEBUG nova.compute.provider_tree [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Updating resource provider f818062c-7b17-4bd0-94af-192a674543c3 generation from 130 to 131 during operation: update_inventory {{(pid=61852) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1022.862363] env[61852]: DEBUG nova.compute.provider_tree [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 138, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1023.256840] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Acquiring lock "33667154-991d-4a32-8f16-f292a4725e3e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.257283] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Lock "33667154-991d-4a32-8f16-f292a4725e3e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1023.257446] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 
tempest-ServerShowV247Test-409016066-project-member] Acquiring lock "33667154-991d-4a32-8f16-f292a4725e3e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.257635] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Lock "33667154-991d-4a32-8f16-f292a4725e3e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1023.257812] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Lock "33667154-991d-4a32-8f16-f292a4725e3e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1023.260119] env[61852]: INFO nova.compute.manager [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Terminating instance [ 1023.261787] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Acquiring lock "refresh_cache-33667154-991d-4a32-8f16-f292a4725e3e" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1023.261947] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Acquired lock "refresh_cache-33667154-991d-4a32-8f16-f292a4725e3e" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.262134] env[61852]: DEBUG nova.network.neutron [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1023.358019] env[61852]: DEBUG oslo_vmware.api [None req-40dcc070-5cca-438e-8431-39271fb963b7 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Task: {'id': task-1293351, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145108} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.358337] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-40dcc070-5cca-438e-8431-39271fb963b7 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1023.358539] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-40dcc070-5cca-438e-8431-39271fb963b7 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1023.358726] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-40dcc070-5cca-438e-8431-39271fb963b7 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1023.358902] env[61852]: INFO nova.compute.manager [None req-40dcc070-5cca-438e-8431-39271fb963b7 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Took 2.95 seconds to destroy the instance on the hypervisor. [ 1023.359172] env[61852]: DEBUG oslo.service.loopingcall [None req-40dcc070-5cca-438e-8431-39271fb963b7 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1023.359374] env[61852]: DEBUG nova.compute.manager [-] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1023.359550] env[61852]: DEBUG nova.network.neutron [-] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1023.367185] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.893s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1023.367705] env[61852]: DEBUG nova.compute.manager [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1023.370870] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.623s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1023.371116] env[61852]: DEBUG nova.objects.instance [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Lazy-loading 'resources' on Instance uuid 9db95089-9fd7-42e5-bbf3-64847642ade6 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1023.620247] env[61852]: DEBUG nova.compute.manager [req-0d39debe-4a0c-408f-a6f1-035d25135321 req-40ae5a24-3937-4a65-bfab-98c4adc98f20 service nova] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Received event network-vif-deleted-155536cc-5884-4a09-8035-a2768a29dc81 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1023.620640] env[61852]: INFO nova.compute.manager [req-0d39debe-4a0c-408f-a6f1-035d25135321 req-40ae5a24-3937-4a65-bfab-98c4adc98f20 service nova] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Neutron deleted interface 155536cc-5884-4a09-8035-a2768a29dc81; detaching it from the instance and deleting it from the info cache [ 1023.620715] env[61852]: DEBUG nova.network.neutron [req-0d39debe-4a0c-408f-a6f1-035d25135321 req-40ae5a24-3937-4a65-bfab-98c4adc98f20 service nova] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.779182] env[61852]: DEBUG nova.network.neutron [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1023.825428] env[61852]: DEBUG nova.network.neutron [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.873940] env[61852]: DEBUG nova.compute.utils [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1023.878991] env[61852]: DEBUG nova.compute.manager [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1023.878991] env[61852]: DEBUG nova.network.neutron [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1023.927447] env[61852]: DEBUG nova.policy [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ced3a12601fd4d2ea075bd92b5a444d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cda1365a8c014771b0627254d322c3bb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 1023.954533] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-424f4a6b-daa0-4f2f-b610-27a94010a010 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.961503] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c094ad6-77b7-4ede-99e3-8e698319d027 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.990815] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71ebf9be-6d17-4439-96b0-37a411f2bcbf {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.997709] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4c6e1e0-8c4d-4735-9080-f45706cb8151 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.011191] env[61852]: DEBUG nova.compute.provider_tree [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1024.096758] env[61852]: DEBUG nova.network.neutron [-] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1024.123936] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c7b8be23-1fbe-4a79-b1f1-5f609d065608 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1024.133047] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ba82f3e-a742-4d14-83cb-2b38116e3bef {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.164580] env[61852]: DEBUG nova.compute.manager [req-0d39debe-4a0c-408f-a6f1-035d25135321 req-40ae5a24-3937-4a65-bfab-98c4adc98f20 service nova] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Detach interface failed, port_id=155536cc-5884-4a09-8035-a2768a29dc81, reason: Instance 8d6dc967-ebe5-4573-b41a-5793f96b7eec could not be found. {{(pid=61852) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1024.185205] env[61852]: DEBUG nova.network.neutron [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Successfully created port: 9cbc23d9-a543-40b1-ad2b-389d5ebe78be {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1024.327855] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Releasing lock "refresh_cache-33667154-991d-4a32-8f16-f292a4725e3e" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1024.328439] env[61852]: DEBUG nova.compute.manager [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1024.328657] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1024.329594] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd68f3e9-a17f-4e11-b286-fd3925ca425b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.339362] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1024.339683] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-11b29cc8-64a1-469a-9ca6-e668f2b7c28c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.349226] env[61852]: DEBUG oslo_vmware.api [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for the task: (returnval){ [ 1024.349226] env[61852]: value = "task-1293353" [ 1024.349226] env[61852]: _type = "Task" [ 1024.349226] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.360254] env[61852]: DEBUG oslo_vmware.api [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293353, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.379034] env[61852]: DEBUG nova.compute.manager [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1024.549306] env[61852]: DEBUG nova.scheduler.client.report [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Updated inventory for provider f818062c-7b17-4bd0-94af-192a674543c3 with generation 131 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1024.549832] env[61852]: DEBUG nova.compute.provider_tree [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Updating resource provider f818062c-7b17-4bd0-94af-192a674543c3 generation from 131 to 132 during operation: update_inventory {{(pid=61852) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1024.550170] env[61852]: DEBUG nova.compute.provider_tree [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Updating inventory in ProviderTree for provider f818062c-7b17-4bd0-94af-192a674543c3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1024.599424] env[61852]: INFO nova.compute.manager [-] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Took 1.24 seconds to deallocate network for instance. [ 1024.861319] env[61852]: DEBUG oslo_vmware.api [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293353, 'name': PowerOffVM_Task, 'duration_secs': 0.254753} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.861685] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1024.861931] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1024.862271] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7fd6a697-7eac-4c8b-a369-cab69317d916 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.888778] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1024.889173] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1024.889407] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Deleting the datastore file [datastore1] 33667154-991d-4a32-8f16-f292a4725e3e {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1024.889900] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eeee8819-33e1-4dd3-8183-7277a77ae332 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.896619] env[61852]: DEBUG oslo_vmware.api [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for the task: (returnval){ [ 1024.896619] env[61852]: value = "task-1293355" [ 1024.896619] env[61852]: _type = "Task" [ 1024.896619] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.904991] env[61852]: DEBUG oslo_vmware.api [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293355, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.056033] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.685s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.059069] env[61852]: DEBUG oslo_concurrency.lockutils [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 2.435s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.059167] env[61852]: DEBUG nova.objects.instance [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61852) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1025.079687] env[61852]: INFO nova.scheduler.client.report [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Deleted allocations for instance 9db95089-9fd7-42e5-bbf3-64847642ade6 [ 1025.105823] env[61852]: DEBUG oslo_concurrency.lockutils [None req-40dcc070-5cca-438e-8431-39271fb963b7 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.392067] env[61852]: DEBUG nova.compute.manager [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1025.407364] env[61852]: DEBUG oslo_vmware.api [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293355, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.115153} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.407637] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1025.407826] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1025.408015] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1025.408206] env[61852]: INFO nova.compute.manager [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1025.408447] env[61852]: DEBUG oslo.service.loopingcall [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1025.408639] env[61852]: DEBUG nova.compute.manager [-] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1025.408733] env[61852]: DEBUG nova.network.neutron [-] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1025.418169] env[61852]: DEBUG nova.virt.hardware [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1025.418468] env[61852]: DEBUG nova.virt.hardware [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 
tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1025.418637] env[61852]: DEBUG nova.virt.hardware [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1025.418827] env[61852]: DEBUG nova.virt.hardware [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1025.418979] env[61852]: DEBUG nova.virt.hardware [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1025.419147] env[61852]: DEBUG nova.virt.hardware [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1025.419362] env[61852]: DEBUG nova.virt.hardware [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1025.419529] env[61852]: DEBUG nova.virt.hardware [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1025.419701] env[61852]: DEBUG nova.virt.hardware [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1025.419869] env[61852]: DEBUG nova.virt.hardware [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1025.420059] env[61852]: DEBUG nova.virt.hardware [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1025.420839] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdab2ecd-b263-40b5-b76f-ce92ca94d44c {{(pid=61852) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.424907] env[61852]: DEBUG nova.network.neutron [-] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1025.429009] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81d39830-8885-45b9-8af3-35595761c09a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.588688] env[61852]: DEBUG oslo_concurrency.lockutils [None req-4540d25f-6ee5-4b03-bb9c-710c18244d88 tempest-ServerMetadataTestJSON-306644247 tempest-ServerMetadataTestJSON-306644247-project-member] Lock "9db95089-9fd7-42e5-bbf3-64847642ade6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.703s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.643607] env[61852]: DEBUG nova.compute.manager [req-b154f1d0-ead7-4487-833a-965ca211d0d6 req-60940831-d97e-4d34-acc6-40411bac87e2 service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Received event network-vif-plugged-9cbc23d9-a543-40b1-ad2b-389d5ebe78be {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1025.643950] env[61852]: DEBUG oslo_concurrency.lockutils [req-b154f1d0-ead7-4487-833a-965ca211d0d6 req-60940831-d97e-4d34-acc6-40411bac87e2 service nova] Acquiring lock "7601ebe5-ff7a-4bdf-b64a-a5b2de069bca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.644267] env[61852]: DEBUG oslo_concurrency.lockutils [req-b154f1d0-ead7-4487-833a-965ca211d0d6 req-60940831-d97e-4d34-acc6-40411bac87e2 service nova] Lock "7601ebe5-ff7a-4bdf-b64a-a5b2de069bca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.644393] env[61852]: DEBUG oslo_concurrency.lockutils [req-b154f1d0-ead7-4487-833a-965ca211d0d6 req-60940831-d97e-4d34-acc6-40411bac87e2 service nova] Lock "7601ebe5-ff7a-4bdf-b64a-a5b2de069bca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.644551] env[61852]: DEBUG nova.compute.manager [req-b154f1d0-ead7-4487-833a-965ca211d0d6 req-60940831-d97e-4d34-acc6-40411bac87e2 service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] No waiting events found dispatching network-vif-plugged-9cbc23d9-a543-40b1-ad2b-389d5ebe78be {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1025.644751] env[61852]: WARNING nova.compute.manager [req-b154f1d0-ead7-4487-833a-965ca211d0d6 req-60940831-d97e-4d34-acc6-40411bac87e2 service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Received unexpected event network-vif-plugged-9cbc23d9-a543-40b1-ad2b-389d5ebe78be for instance with vm_state building and task_state spawning. 
[ 1025.682892] env[61852]: DEBUG nova.network.neutron [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Successfully updated port: 9cbc23d9-a543-40b1-ad2b-389d5ebe78be {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1025.930284] env[61852]: DEBUG nova.network.neutron [-] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.070277] env[61852]: DEBUG oslo_concurrency.lockutils [None req-931b1d4b-06ea-4ed5-bcec-8ef400109107 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.071813] env[61852]: DEBUG oslo_concurrency.lockutils [None req-40dcc070-5cca-438e-8431-39271fb963b7 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.966s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1026.072066] env[61852]: DEBUG nova.objects.instance [None req-40dcc070-5cca-438e-8431-39271fb963b7 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lazy-loading 'resources' on Instance uuid 8d6dc967-ebe5-4573-b41a-5793f96b7eec {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1026.185853] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Acquiring lock "refresh_cache-7601ebe5-ff7a-4bdf-b64a-a5b2de069bca" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1026.186035] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Acquired lock "refresh_cache-7601ebe5-ff7a-4bdf-b64a-a5b2de069bca" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.186217] env[61852]: DEBUG nova.network.neutron [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1026.431487] env[61852]: INFO nova.compute.manager [-] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Took 1.02 seconds to deallocate network for instance. 
[ 1026.632321] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f86ccd0b-4430-4b91-a7d8-fe335b2ff0d9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.640099] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d041a4f-6930-4afb-b094-c0e1d2a431fb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.669797] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c9cee5a-d6e5-4664-8488-df8090341a91 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.677081] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3256d380-2aa0-4c8b-a103-57ca10dec5e3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.692972] env[61852]: DEBUG nova.compute.provider_tree [None req-40dcc070-5cca-438e-8431-39271fb963b7 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1026.744922] env[61852]: DEBUG nova.network.neutron [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1026.862494] env[61852]: DEBUG nova.network.neutron [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Updating instance_info_cache with network_info: [{"id": "9cbc23d9-a543-40b1-ad2b-389d5ebe78be", "address": "fa:16:3e:25:d9:2f", "network": {"id": "15708ffb-fab5-4bb1-b3c6-48dfd8fba2dd", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-334319416-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "cda1365a8c014771b0627254d322c3bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cbc23d9-a5", "ovs_interfaceid": "9cbc23d9-a543-40b1-ad2b-389d5ebe78be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.938216] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 
tempest-ServerShowV247Test-409016066-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.197280] env[61852]: DEBUG nova.scheduler.client.report [None req-40dcc070-5cca-438e-8431-39271fb963b7 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1027.365410] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Releasing lock "refresh_cache-7601ebe5-ff7a-4bdf-b64a-a5b2de069bca" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1027.365736] env[61852]: DEBUG nova.compute.manager [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Instance network_info: |[{"id": "9cbc23d9-a543-40b1-ad2b-389d5ebe78be", "address": "fa:16:3e:25:d9:2f", "network": {"id": "15708ffb-fab5-4bb1-b3c6-48dfd8fba2dd", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-334319416-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "cda1365a8c014771b0627254d322c3bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cbc23d9-a5", "ovs_interfaceid": "9cbc23d9-a543-40b1-ad2b-389d5ebe78be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1027.366188] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:25:d9:2f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19671de9-8b5b-4710-adc3-7419f3c0f171', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9cbc23d9-a543-40b1-ad2b-389d5ebe78be', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1027.373474] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Creating folder: Project (cda1365a8c014771b0627254d322c3bb). Parent ref: group-v277280. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1027.373756] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b86e5649-3b26-4c9e-9a3b-621959124024 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.387163] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Created folder: Project (cda1365a8c014771b0627254d322c3bb) in parent group-v277280. [ 1027.387390] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Creating folder: Instances. Parent ref: group-v277421. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1027.387639] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eb6b012b-5e66-4ab6-87fe-4cb355342865 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.398095] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Created folder: Instances in parent group-v277421. [ 1027.398329] env[61852]: DEBUG oslo.service.loopingcall [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1027.398518] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1027.398726] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-662f2f4b-60d2-493d-aaf0-d360a79b71f4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.418383] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1027.418383] env[61852]: value = "task-1293359" [ 1027.418383] env[61852]: _type = "Task" [ 1027.418383] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.425940] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293359, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.671016] env[61852]: DEBUG nova.compute.manager [req-2797843f-7615-4f56-8ae2-75bec82dac57 req-f7f59f81-b169-4a3f-8699-ef2ae5b2f157 service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Received event network-changed-9cbc23d9-a543-40b1-ad2b-389d5ebe78be {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1027.671287] env[61852]: DEBUG nova.compute.manager [req-2797843f-7615-4f56-8ae2-75bec82dac57 req-f7f59f81-b169-4a3f-8699-ef2ae5b2f157 service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Refreshing instance network info cache due to event network-changed-9cbc23d9-a543-40b1-ad2b-389d5ebe78be. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1027.671524] env[61852]: DEBUG oslo_concurrency.lockutils [req-2797843f-7615-4f56-8ae2-75bec82dac57 req-f7f59f81-b169-4a3f-8699-ef2ae5b2f157 service nova] Acquiring lock "refresh_cache-7601ebe5-ff7a-4bdf-b64a-a5b2de069bca" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1027.671706] env[61852]: DEBUG oslo_concurrency.lockutils [req-2797843f-7615-4f56-8ae2-75bec82dac57 req-f7f59f81-b169-4a3f-8699-ef2ae5b2f157 service nova] Acquired lock "refresh_cache-7601ebe5-ff7a-4bdf-b64a-a5b2de069bca" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.671881] env[61852]: DEBUG nova.network.neutron [req-2797843f-7615-4f56-8ae2-75bec82dac57 req-f7f59f81-b169-4a3f-8699-ef2ae5b2f157 service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Refreshing network info cache for port 9cbc23d9-a543-40b1-ad2b-389d5ebe78be {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1027.700270] env[61852]: DEBUG oslo_concurrency.lockutils [None req-40dcc070-5cca-438e-8431-39271fb963b7 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.628s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.702801] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.765s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.703105] env[61852]: DEBUG nova.objects.instance [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Lazy-loading 'resources' on Instance uuid 33667154-991d-4a32-8f16-f292a4725e3e {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1027.722737] env[61852]: INFO nova.scheduler.client.report [None req-40dcc070-5cca-438e-8431-39271fb963b7 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Deleted allocations for instance 8d6dc967-ebe5-4573-b41a-5793f96b7eec [ 1027.928669] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293359, 'name': CreateVM_Task, 'duration_secs': 0.293366} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.928856] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1027.929588] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1027.929725] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.930092] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1027.930381] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b77af96-cbb9-47f5-ba5d-c3ded8f58517 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.934573] env[61852]: DEBUG oslo_vmware.api [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Waiting for the task: (returnval){ [ 1027.934573] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5233fbad-51df-4c8f-8c2d-8d45c28e8500" [ 1027.934573] env[61852]: _type = "Task" [ 1027.934573] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.942066] env[61852]: DEBUG oslo_vmware.api [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5233fbad-51df-4c8f-8c2d-8d45c28e8500, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.231091] env[61852]: DEBUG oslo_concurrency.lockutils [None req-40dcc070-5cca-438e-8431-39271fb963b7 tempest-ServersTestJSON-848945246 tempest-ServersTestJSON-848945246-project-member] Lock "8d6dc967-ebe5-4573-b41a-5793f96b7eec" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.827s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.264354] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef6dad82-bd36-479b-ae48-93283e1aec48 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.272028] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dc18518-7ff6-402e-847f-84f3112c1e7c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.303274] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb8d639-d8b0-427e-8fcc-0e10a6e4e8c5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.311320] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ee2afd-fcac-49d7-8bca-0ad2f7dcdf11 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.324918] env[61852]: DEBUG nova.compute.provider_tree [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1028.444955] env[61852]: DEBUG oslo_vmware.api [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5233fbad-51df-4c8f-8c2d-8d45c28e8500, 'name': SearchDatastore_Task, 'duration_secs': 0.011374} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.445325] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1028.445574] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1028.445813] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1028.445964] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.446160] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1028.446440] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ecbf764-c9d6-4302-b678-ce6e0dfef175 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.453791] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1028.453967] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1028.454667] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d88c69cb-b060-43ff-bbc1-181435ecb7bf {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.459378] env[61852]: DEBUG oslo_vmware.api [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Waiting for the task: (returnval){ [ 1028.459378] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d47d12-6a65-3299-2268-334be539e440" [ 1028.459378] env[61852]: _type = "Task" [ 1028.459378] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.466501] env[61852]: DEBUG oslo_vmware.api [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d47d12-6a65-3299-2268-334be539e440, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.655186] env[61852]: DEBUG nova.network.neutron [req-2797843f-7615-4f56-8ae2-75bec82dac57 req-f7f59f81-b169-4a3f-8699-ef2ae5b2f157 service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Updated VIF entry in instance network info cache for port 9cbc23d9-a543-40b1-ad2b-389d5ebe78be. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1028.655582] env[61852]: DEBUG nova.network.neutron [req-2797843f-7615-4f56-8ae2-75bec82dac57 req-f7f59f81-b169-4a3f-8699-ef2ae5b2f157 service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Updating instance_info_cache with network_info: [{"id": "9cbc23d9-a543-40b1-ad2b-389d5ebe78be", "address": "fa:16:3e:25:d9:2f", "network": {"id": "15708ffb-fab5-4bb1-b3c6-48dfd8fba2dd", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-334319416-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "cda1365a8c014771b0627254d322c3bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cbc23d9-a5", "ovs_interfaceid": "9cbc23d9-a543-40b1-ad2b-389d5ebe78be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.828586] env[61852]: DEBUG nova.scheduler.client.report [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: 
{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1028.970023] env[61852]: DEBUG oslo_vmware.api [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52d47d12-6a65-3299-2268-334be539e440, 'name': SearchDatastore_Task, 'duration_secs': 0.008426} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.970875] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94a6dd7a-abc3-40d9-81fc-9a6d2b87dc02 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.976030] env[61852]: DEBUG oslo_vmware.api [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Waiting for the task: (returnval){ [ 1028.976030] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52564ab8-acb0-7448-8d46-3b1afc5a69a8" [ 1028.976030] env[61852]: _type = "Task" [ 1028.976030] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.984783] env[61852]: DEBUG oslo_vmware.api [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52564ab8-acb0-7448-8d46-3b1afc5a69a8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.160470] env[61852]: DEBUG oslo_concurrency.lockutils [req-2797843f-7615-4f56-8ae2-75bec82dac57 req-f7f59f81-b169-4a3f-8699-ef2ae5b2f157 service nova] Releasing lock "refresh_cache-7601ebe5-ff7a-4bdf-b64a-a5b2de069bca" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1029.340741] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.638s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.367878] env[61852]: INFO nova.scheduler.client.report [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Deleted allocations for instance 33667154-991d-4a32-8f16-f292a4725e3e [ 1029.488479] env[61852]: DEBUG oslo_vmware.api [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52564ab8-acb0-7448-8d46-3b1afc5a69a8, 'name': SearchDatastore_Task, 'duration_secs': 0.009533} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.488754] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1029.489203] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca/7601ebe5-ff7a-4bdf-b64a-a5b2de069bca.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1029.489329] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-405fcbf6-0a3f-4788-bdc0-02850fa4a58d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.496802] env[61852]: DEBUG oslo_vmware.api [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Waiting for the task: (returnval){ [ 1029.496802] env[61852]: value = "task-1293361" [ 1029.496802] env[61852]: _type = "Task" [ 1029.496802] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.509658] env[61852]: DEBUG oslo_vmware.api [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': task-1293361, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.879291] env[61852]: DEBUG oslo_concurrency.lockutils [None req-3c6c2f33-d0e3-49d2-96ae-ab3af589f6fb tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Lock "33667154-991d-4a32-8f16-f292a4725e3e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.620s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.007199] env[61852]: DEBUG oslo_vmware.api [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': task-1293361, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.509355] env[61852]: DEBUG oslo_vmware.api [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': task-1293361, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.51789} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.509715] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca/7601ebe5-ff7a-4bdf-b64a-a5b2de069bca.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1030.510289] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1030.510617] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-86b926a2-3d89-4e7d-b1d6-f027131405a5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.521216] env[61852]: DEBUG oslo_vmware.api [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Waiting for the task: (returnval){ [ 1030.521216] env[61852]: value = "task-1293363" [ 1030.521216] env[61852]: _type = "Task" [ 1030.521216] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.533331] env[61852]: DEBUG oslo_vmware.api [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': task-1293363, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.933660] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Acquiring lock "3ae6fdae-3246-4607-b15d-c320c4dc816b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.933660] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Lock "3ae6fdae-3246-4607-b15d-c320c4dc816b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.933975] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Acquiring lock "3ae6fdae-3246-4607-b15d-c320c4dc816b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.933975] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Lock "3ae6fdae-3246-4607-b15d-c320c4dc816b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.934191] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Lock "3ae6fdae-3246-4607-b15d-c320c4dc816b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.936989] env[61852]: INFO nova.compute.manager [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Terminating instance [ 1030.939052] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Acquiring lock "refresh_cache-3ae6fdae-3246-4607-b15d-c320c4dc816b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1030.939747] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 
tempest-ServerShowV247Test-409016066-project-member] Acquired lock "refresh_cache-3ae6fdae-3246-4607-b15d-c320c4dc816b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.939747] env[61852]: DEBUG nova.network.neutron [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1031.031241] env[61852]: DEBUG oslo_vmware.api [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': task-1293363, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065452} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.031570] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1031.032413] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6efcd85c-32df-4e29-8914-3f22fff2ce1e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.057610] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca/7601ebe5-ff7a-4bdf-b64a-a5b2de069bca.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1031.057610] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-665da24b-27f6-49b4-8c91-d9eea9ae126f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.077022] env[61852]: DEBUG oslo_vmware.api [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Waiting for the task: (returnval){ [ 1031.077022] env[61852]: value = "task-1293364" [ 1031.077022] env[61852]: _type = "Task" [ 1031.077022] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.088259] env[61852]: DEBUG oslo_vmware.api [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': task-1293364, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.459438] env[61852]: DEBUG nova.network.neutron [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1031.513602] env[61852]: DEBUG nova.network.neutron [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.587435] env[61852]: DEBUG oslo_vmware.api [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': task-1293364, 'name': ReconfigVM_Task, 'duration_secs': 0.27009} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.587753] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Reconfigured VM instance instance-00000064 to attach disk [datastore1] 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca/7601ebe5-ff7a-4bdf-b64a-a5b2de069bca.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1031.588733] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4a43067f-6a8e-4178-a436-f9fa0e346ed1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.595579] env[61852]: DEBUG oslo_vmware.api [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Waiting for the task: (returnval){ [ 1031.595579] env[61852]: value = "task-1293365" [ 1031.595579] env[61852]: _type = "Task" [ 1031.595579] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.609170] env[61852]: DEBUG oslo_vmware.api [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': task-1293365, 'name': Rename_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.803920] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquiring lock "d8baa4c3-7da1-450c-8bef-336fbb34ceab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.804272] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d8baa4c3-7da1-450c-8bef-336fbb34ceab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.016690] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Releasing lock "refresh_cache-3ae6fdae-3246-4607-b15d-c320c4dc816b" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1032.017159] env[61852]: DEBUG nova.compute.manager [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1032.017427] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1032.018335] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-299c54cc-bec0-4ed2-b24d-a12ad94ce422 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.025958] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1032.026285] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1379359c-c800-4f09-879f-a39f0800c766 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.032617] env[61852]: DEBUG oslo_vmware.api [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for the task: (returnval){ [ 1032.032617] env[61852]: value = "task-1293366" [ 1032.032617] env[61852]: _type = "Task" [ 1032.032617] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.040704] env[61852]: DEBUG oslo_vmware.api [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293366, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.104751] env[61852]: DEBUG oslo_vmware.api [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': task-1293365, 'name': Rename_Task, 'duration_secs': 0.145743} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.105054] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1032.105360] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-13e8a7fb-6067-4cca-91a8-1d4d4223fb01 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.113770] env[61852]: DEBUG oslo_vmware.api [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Waiting for the task: (returnval){ [ 1032.113770] env[61852]: value = "task-1293367" [ 1032.113770] env[61852]: _type = "Task" [ 1032.113770] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.122227] env[61852]: DEBUG oslo_vmware.api [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': task-1293367, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.306869] env[61852]: DEBUG nova.compute.manager [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1032.542938] env[61852]: DEBUG oslo_vmware.api [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293366, 'name': PowerOffVM_Task, 'duration_secs': 0.113467} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.543337] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1032.543628] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1032.543959] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fa175e6a-f9db-4379-8a8e-5b85ec86ad87 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.568325] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1032.568642] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Deleting contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1032.568927] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Deleting the datastore file [datastore2] 3ae6fdae-3246-4607-b15d-c320c4dc816b {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1032.569269] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-20806cde-2b3b-4408-b74b-e3334270b426 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.575785] env[61852]: DEBUG oslo_vmware.api [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for the task: (returnval){ [ 1032.575785] env[61852]: value = "task-1293369" [ 1032.575785] env[61852]: _type = "Task" [ 1032.575785] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.587952] env[61852]: DEBUG oslo_vmware.api [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293369, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.625780] env[61852]: DEBUG oslo_vmware.api [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': task-1293367, 'name': PowerOnVM_Task, 'duration_secs': 0.493835} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.626011] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1032.626229] env[61852]: INFO nova.compute.manager [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Took 7.23 seconds to spawn the instance on the hypervisor. [ 1032.626415] env[61852]: DEBUG nova.compute.manager [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1032.627249] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f16d290-3975-4123-ade7-c798eb86eaab {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.832473] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.832760] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.834346] env[61852]: INFO nova.compute.claims [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1033.086511] env[61852]: DEBUG oslo_vmware.api [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Task: {'id': task-1293369, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132688} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.088706] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1033.088968] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Deleted contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1033.089184] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1033.089370] env[61852]: INFO nova.compute.manager [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Took 1.07 seconds to destroy the instance on the hypervisor. [ 1033.090019] env[61852]: DEBUG oslo.service.loopingcall [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1033.090019] env[61852]: DEBUG nova.compute.manager [-] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1033.090019] env[61852]: DEBUG nova.network.neutron [-] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1033.105929] env[61852]: DEBUG nova.network.neutron [-] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1033.143893] env[61852]: INFO nova.compute.manager [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Took 12.69 seconds to build instance. 
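(Every vCenter operation in the build sequence above, from CreateVM_Task through SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task and PowerOnVM_Task, follows the same oslo.vmware call-and-poll pattern: invoke_api issues the SOAP request (the "Invoking ... with opID=oslo.vmware-..." entries) and wait_for_task then polls the returned task object, emitting the "progress is N%" and "completed successfully" entries. A minimal sketch of that pattern, assuming a reachable vCenter; the host, credentials, and inventory path are placeholders.)

    from oslo_vmware import api

    # Establishes the authenticated session, as in the "Successfully
    # established new session" entry near the top of this log.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10,
                                   task_poll_interval=0.5)

    # Look up a VM by inventory path (placeholder path), then power it on
    # and block until the vCenter task reports success or raises on error.
    search_index = session.vim.service_content.searchIndex
    vm_ref = session.invoke_api(session.vim, 'FindByInventoryPath',
                                search_index,
                                inventoryPath='dc1/vm/example-vm')
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task)

(wait_for_task re-reads the task's TaskInfo every task_poll_interval seconds, which is what produces the periodic progress entries interleaved through this log.)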
[ 1033.609209] env[61852]: DEBUG nova.network.neutron [-] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.646050] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b9bc9dec-5555-4aed-be4f-fc2935543677 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Lock "7601ebe5-ff7a-4bdf-b64a-a5b2de069bca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.197s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.908287] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb5072e2-3625-4361-829f-1c8934e5289a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.915991] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a00fe81-bf27-4d6e-9bcd-c4fb93034a1b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.949860] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f3819ec-e7c5-4653-b23e-5da6560816ef {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.958777] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd2c736-22fb-4330-bb0d-231c3241329b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.972575] env[61852]: DEBUG nova.compute.provider_tree [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1034.107731] env[61852]: INFO nova.compute.manager [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Rescuing [ 1034.108090] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Acquiring lock "refresh_cache-7601ebe5-ff7a-4bdf-b64a-a5b2de069bca" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.108215] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Acquired lock "refresh_cache-7601ebe5-ff7a-4bdf-b64a-a5b2de069bca" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.108464] env[61852]: DEBUG nova.network.neutron [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Building network info 
cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1034.110755] env[61852]: INFO nova.compute.manager [-] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Took 1.02 seconds to deallocate network for instance. [ 1034.446031] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Acquiring lock "25892f5e-147c-49b7-8009-60755a82a840" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1034.446291] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Lock "25892f5e-147c-49b7-8009-60755a82a840" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.475653] env[61852]: DEBUG nova.scheduler.client.report [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1034.616557] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1034.796154] env[61852]: DEBUG nova.network.neutron [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Updating instance_info_cache with network_info: [{"id": "9cbc23d9-a543-40b1-ad2b-389d5ebe78be", "address": "fa:16:3e:25:d9:2f", "network": {"id": "15708ffb-fab5-4bb1-b3c6-48dfd8fba2dd", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-334319416-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "cda1365a8c014771b0627254d322c3bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 
421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cbc23d9-a5", "ovs_interfaceid": "9cbc23d9-a543-40b1-ad2b-389d5ebe78be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.949031] env[61852]: DEBUG nova.compute.manager [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1034.981153] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.147s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.981153] env[61852]: DEBUG nova.compute.manager [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1034.987594] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.367s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.987594] env[61852]: DEBUG nova.objects.instance [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Lazy-loading 'resources' on Instance uuid 3ae6fdae-3246-4607-b15d-c320c4dc816b {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1035.028262] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "ad917577-5285-4f8d-8096-d83424deba33" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.028577] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "ad917577-5285-4f8d-8096-d83424deba33" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1035.299622] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Releasing lock "refresh_cache-7601ebe5-ff7a-4bdf-b64a-a5b2de069bca" 
{{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.478989] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.487731] env[61852]: DEBUG nova.compute.utils [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1035.492656] env[61852]: DEBUG nova.compute.manager [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1035.492656] env[61852]: DEBUG nova.network.neutron [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1035.530818] env[61852]: DEBUG nova.policy [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '76b12b5f5eb843418b31ed30f6f5520c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a10be4b0f16c432c87b39b211fbf2fee', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 1035.532344] env[61852]: DEBUG nova.compute.manager [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1035.565556] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fdb396f-9e7d-41c2-bad3-b7fca29b2ded {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.576283] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51a7fe07-dcaf-447b-88d4-f28e8c5a7cd0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.608618] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b1ec571-1b6e-43e5-b19b-18d359c45122 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.616277] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54efd448-2531-4f2a-9ad4-655c60e69b1f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.630536] env[61852]: DEBUG nova.compute.provider_tree [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1035.808399] env[61852]: DEBUG nova.network.neutron [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Successfully created port: 537951d4-2e0a-45fd-a9eb-39ddf930b39d {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1035.831669] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1035.831996] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f0cff68f-8521-4e10-a181-7e90fd354290 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.840456] env[61852]: DEBUG oslo_vmware.api [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Waiting for the task: (returnval){ [ 1035.840456] env[61852]: value = "task-1293370" [ 1035.840456] env[61852]: _type = "Task" [ 1035.840456] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.849297] env[61852]: DEBUG oslo_vmware.api [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': task-1293370, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.993757] env[61852]: DEBUG nova.compute.manager [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1036.057051] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.133500] env[61852]: DEBUG nova.scheduler.client.report [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1036.349959] env[61852]: DEBUG oslo_vmware.api [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': task-1293370, 'name': PowerOffVM_Task, 'duration_secs': 0.210558} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.350277] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1036.351057] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e9d852-8d10-4a13-858d-9b6b7f7c6649 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.368700] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a2cf4b2-f082-4e55-801a-18b6b8d2f75e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.399961] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1036.400342] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3314486f-24df-46f2-886b-80ddd57c316f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.407015] env[61852]: DEBUG oslo_vmware.api [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Waiting for the task: (returnval){ [ 1036.407015] env[61852]: value = "task-1293371" [ 1036.407015] env[61852]: _type = "Task" [ 1036.407015] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.416601] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] VM already powered off {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1036.416809] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1036.417066] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1036.417260] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.417444] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1036.417675] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6e4a7e92-6c4e-40b7-a66c-202784b5d7bb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.425263] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1036.425452] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1036.426184] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b917004-a339-4b75-8af6-a7ab4f2f662e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.431125] env[61852]: DEBUG oslo_vmware.api [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Waiting for the task: (returnval){ [ 1036.431125] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]527dd99d-5a8c-1bbc-137f-1df8ad15affc" [ 1036.431125] env[61852]: _type = "Task" [ 1036.431125] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.438068] env[61852]: DEBUG oslo_vmware.api [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]527dd99d-5a8c-1bbc-137f-1df8ad15affc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.521165] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1036.521444] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1036.521654] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Starting heal instance info cache {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1036.638613] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.655s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1036.641181] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.162s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.642774] env[61852]: INFO nova.compute.claims [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1036.656825] env[61852]: INFO nova.scheduler.client.report [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 
tempest-ServerShowV247Test-409016066-project-member] Deleted allocations for instance 3ae6fdae-3246-4607-b15d-c320c4dc816b [ 1036.941703] env[61852]: DEBUG oslo_vmware.api [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]527dd99d-5a8c-1bbc-137f-1df8ad15affc, 'name': SearchDatastore_Task, 'duration_secs': 0.008445} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.942917] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-946ba81a-a231-417d-9835-9036c9a47ab7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.947788] env[61852]: DEBUG oslo_vmware.api [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Waiting for the task: (returnval){ [ 1036.947788] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52893ce1-e501-d111-a475-273f210fc165" [ 1036.947788] env[61852]: _type = "Task" [ 1036.947788] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.955534] env[61852]: DEBUG oslo_vmware.api [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52893ce1-e501-d111-a475-273f210fc165, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.002320] env[61852]: DEBUG nova.compute.manager [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1037.024617] env[61852]: DEBUG nova.virt.hardware [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1037.024617] env[61852]: DEBUG nova.virt.hardware [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1037.024617] env[61852]: DEBUG nova.virt.hardware [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1037.024617] env[61852]: DEBUG nova.virt.hardware [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1037.024915] env[61852]: DEBUG nova.virt.hardware [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1037.024915] env[61852]: DEBUG nova.virt.hardware [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1037.025127] env[61852]: DEBUG nova.virt.hardware [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1037.025352] env[61852]: DEBUG nova.virt.hardware [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1037.025551] env[61852]: DEBUG nova.virt.hardware [None 
req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1037.025718] env[61852]: DEBUG nova.virt.hardware [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1037.026116] env[61852]: DEBUG nova.virt.hardware [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1037.029270] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c49839a-e9b3-4087-b823-69bbca1af7c8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.038034] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b81c080-2f48-4e4e-9486-8b9c6496c9c5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.163378] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e77e4daa-e6fd-4a6b-9f56-7fd00790ce01 tempest-ServerShowV247Test-409016066 tempest-ServerShowV247Test-409016066-project-member] Lock "3ae6fdae-3246-4607-b15d-c320c4dc816b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.230s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.248641] env[61852]: DEBUG nova.compute.manager [req-f149389a-c865-4b6c-b71a-5348af06e6cc req-94118293-89f7-4654-a7ce-c0dacacaaa2d service nova] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Received event network-vif-plugged-537951d4-2e0a-45fd-a9eb-39ddf930b39d {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1037.248875] env[61852]: DEBUG oslo_concurrency.lockutils [req-f149389a-c865-4b6c-b71a-5348af06e6cc req-94118293-89f7-4654-a7ce-c0dacacaaa2d service nova] Acquiring lock "d8baa4c3-7da1-450c-8bef-336fbb34ceab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1037.249158] env[61852]: DEBUG oslo_concurrency.lockutils [req-f149389a-c865-4b6c-b71a-5348af06e6cc req-94118293-89f7-4654-a7ce-c0dacacaaa2d service nova] Lock "d8baa4c3-7da1-450c-8bef-336fbb34ceab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1037.249305] env[61852]: DEBUG oslo_concurrency.lockutils [req-f149389a-c865-4b6c-b71a-5348af06e6cc req-94118293-89f7-4654-a7ce-c0dacacaaa2d service nova] Lock "d8baa4c3-7da1-450c-8bef-336fbb34ceab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.249485] 
env[61852]: DEBUG nova.compute.manager [req-f149389a-c865-4b6c-b71a-5348af06e6cc req-94118293-89f7-4654-a7ce-c0dacacaaa2d service nova] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] No waiting events found dispatching network-vif-plugged-537951d4-2e0a-45fd-a9eb-39ddf930b39d {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1037.249673] env[61852]: WARNING nova.compute.manager [req-f149389a-c865-4b6c-b71a-5348af06e6cc req-94118293-89f7-4654-a7ce-c0dacacaaa2d service nova] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Received unexpected event network-vif-plugged-537951d4-2e0a-45fd-a9eb-39ddf930b39d for instance with vm_state building and task_state spawning. [ 1037.341906] env[61852]: DEBUG nova.network.neutron [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Successfully updated port: 537951d4-2e0a-45fd-a9eb-39ddf930b39d {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1037.472435] env[61852]: DEBUG oslo_vmware.api [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52893ce1-e501-d111-a475-273f210fc165, 'name': SearchDatastore_Task, 'duration_secs': 0.009262} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.472435] env[61852]: DEBUG oslo_concurrency.lockutils [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1037.472435] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca/90fd8f39-16b3-43e0-a682-0ec131005e31-rescue.vmdk. {{(pid=61852) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1037.472435] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f53df54f-462c-4e67-915c-2aba2967de12 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.485887] env[61852]: DEBUG oslo_vmware.api [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Waiting for the task: (returnval){ [ 1037.485887] env[61852]: value = "task-1293372" [ 1037.485887] env[61852]: _type = "Task" [ 1037.485887] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.494263] env[61852]: DEBUG oslo_vmware.api [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': task-1293372, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.533504] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Didn't find any instances for network info cache update. {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1037.533717] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1037.533884] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1037.534048] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1037.534205] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1037.534353] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1037.534521] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1037.534652] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61852) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1037.534830] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1037.728461] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09079731-228d-4d64-81c8-1c8dfa12fe03 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.736804] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7484356-4be3-4816-b8c3-0d9aaf92a66f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.771854] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecee2b30-45aa-43fc-a1cf-f55a48a51833 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.784049] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da1bfb6-8824-4d87-81b7-dabc1ef80178 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.798541] env[61852]: DEBUG nova.compute.provider_tree [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1037.844125] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquiring lock "refresh_cache-d8baa4c3-7da1-450c-8bef-336fbb34ceab" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1037.844329] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquired lock "refresh_cache-d8baa4c3-7da1-450c-8bef-336fbb34ceab" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.844492] env[61852]: DEBUG nova.network.neutron [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1037.995285] env[61852]: DEBUG oslo_vmware.api [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': task-1293372, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480626} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.995513] env[61852]: INFO nova.virt.vmwareapi.ds_util [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca/90fd8f39-16b3-43e0-a682-0ec131005e31-rescue.vmdk. [ 1037.996261] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e3bf1f-fb5b-428d-ad18-2109deb7aa2c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.020949] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca/90fd8f39-16b3-43e0-a682-0ec131005e31-rescue.vmdk or device None with type thin {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1038.021209] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e2a0d908-ec69-41c9-b275-e436a7b2ff4e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.037992] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1038.039336] env[61852]: DEBUG oslo_vmware.api [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Waiting for the task: (returnval){ [ 1038.039336] env[61852]: value = "task-1293373" [ 1038.039336] env[61852]: _type = "Task" [ 1038.039336] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.047449] env[61852]: DEBUG oslo_vmware.api [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': task-1293373, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.303581] env[61852]: DEBUG nova.scheduler.client.report [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1038.376611] env[61852]: DEBUG nova.network.neutron [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1038.508468] env[61852]: DEBUG nova.network.neutron [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Updating instance_info_cache with network_info: [{"id": "537951d4-2e0a-45fd-a9eb-39ddf930b39d", "address": "fa:16:3e:bd:ee:7f", "network": {"id": "07c444d7-03d4-406b-bb66-de44a92b43d6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-818710190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a10be4b0f16c432c87b39b211fbf2fee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0c293d47-74c0-49d7-a474-cdb643080f6f", "external-id": "nsx-vlan-transportzone-172", "segmentation_id": 172, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap537951d4-2e", "ovs_interfaceid": "537951d4-2e0a-45fd-a9eb-39ddf930b39d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.548867] env[61852]: DEBUG oslo_vmware.api [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': task-1293373, 'name': ReconfigVM_Task, 'duration_secs': 0.313689} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.549165] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Reconfigured VM instance instance-00000064 to attach disk [datastore1] 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca/90fd8f39-16b3-43e0-a682-0ec131005e31-rescue.vmdk or device None with type thin {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1038.549987] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3b4cdd8-6b35-408d-b940-d1a52c586794 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.574275] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1a572024-b1c1-41d6-9139-20111b7bcd4c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.588986] env[61852]: DEBUG oslo_vmware.api [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Waiting for the task: (returnval){ [ 1038.588986] env[61852]: value = "task-1293374" [ 1038.588986] env[61852]: _type = "Task" [ 1038.588986] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.598669] env[61852]: DEBUG oslo_vmware.api [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': task-1293374, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.809179] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.168s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1038.809559] env[61852]: DEBUG nova.compute.manager [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1038.812320] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.756s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1038.813752] env[61852]: INFO nova.compute.claims [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1039.011192] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Releasing lock "refresh_cache-d8baa4c3-7da1-450c-8bef-336fbb34ceab" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1039.011521] env[61852]: DEBUG nova.compute.manager [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Instance network_info: |[{"id": "537951d4-2e0a-45fd-a9eb-39ddf930b39d", "address": "fa:16:3e:bd:ee:7f", "network": {"id": "07c444d7-03d4-406b-bb66-de44a92b43d6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-818710190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a10be4b0f16c432c87b39b211fbf2fee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0c293d47-74c0-49d7-a474-cdb643080f6f", "external-id": "nsx-vlan-transportzone-172", "segmentation_id": 172, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap537951d4-2e", "ovs_interfaceid": "537951d4-2e0a-45fd-a9eb-39ddf930b39d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1039.011966] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bd:ee:7f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0c293d47-74c0-49d7-a474-cdb643080f6f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '537951d4-2e0a-45fd-a9eb-39ddf930b39d', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1039.020372] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 
tempest-AttachVolumeTestJSON-391606047-project-member] Creating folder: Project (a10be4b0f16c432c87b39b211fbf2fee). Parent ref: group-v277280. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1039.020651] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5c2ca86a-b6b7-4dd7-9776-a31f9ea2ec29 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.031616] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Created folder: Project (a10be4b0f16c432c87b39b211fbf2fee) in parent group-v277280. [ 1039.031800] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Creating folder: Instances. Parent ref: group-v277424. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1039.032048] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-201ffa7a-f8fa-4ad3-a059-05137f333a93 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.040473] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Created folder: Instances in parent group-v277424. [ 1039.040704] env[61852]: DEBUG oslo.service.loopingcall [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1039.040886] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1039.041099] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e7c61879-fa34-4a7a-8e18-796fbfe67f58 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.058832] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1039.058832] env[61852]: value = "task-1293377" [ 1039.058832] env[61852]: _type = "Task" [ 1039.058832] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.066043] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293377, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.098332] env[61852]: DEBUG oslo_vmware.api [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': task-1293374, 'name': ReconfigVM_Task, 'duration_secs': 0.18476} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.098608] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1039.098863] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-309c3628-c6cf-4a8f-9da4-5de0046afd43 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.104879] env[61852]: DEBUG oslo_vmware.api [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Waiting for the task: (returnval){ [ 1039.104879] env[61852]: value = "task-1293378" [ 1039.104879] env[61852]: _type = "Task" [ 1039.104879] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.112582] env[61852]: DEBUG oslo_vmware.api [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': task-1293378, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.278442] env[61852]: DEBUG nova.compute.manager [req-23dd86e8-51c3-42be-a8ff-c430c4be5824 req-0d3e69c2-20e9-4703-b176-9d3869383612 service nova] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Received event network-changed-537951d4-2e0a-45fd-a9eb-39ddf930b39d {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1039.278686] env[61852]: DEBUG nova.compute.manager [req-23dd86e8-51c3-42be-a8ff-c430c4be5824 req-0d3e69c2-20e9-4703-b176-9d3869383612 service nova] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Refreshing instance network info cache due to event network-changed-537951d4-2e0a-45fd-a9eb-39ddf930b39d. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1039.278941] env[61852]: DEBUG oslo_concurrency.lockutils [req-23dd86e8-51c3-42be-a8ff-c430c4be5824 req-0d3e69c2-20e9-4703-b176-9d3869383612 service nova] Acquiring lock "refresh_cache-d8baa4c3-7da1-450c-8bef-336fbb34ceab" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1039.279288] env[61852]: DEBUG oslo_concurrency.lockutils [req-23dd86e8-51c3-42be-a8ff-c430c4be5824 req-0d3e69c2-20e9-4703-b176-9d3869383612 service nova] Acquired lock "refresh_cache-d8baa4c3-7da1-450c-8bef-336fbb34ceab" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.279502] env[61852]: DEBUG nova.network.neutron [req-23dd86e8-51c3-42be-a8ff-c430c4be5824 req-0d3e69c2-20e9-4703-b176-9d3869383612 service nova] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Refreshing network info cache for port 537951d4-2e0a-45fd-a9eb-39ddf930b39d {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1039.318407] env[61852]: DEBUG nova.compute.utils [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1039.324019] env[61852]: DEBUG nova.compute.manager [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1039.324019] env[61852]: DEBUG nova.network.neutron [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1039.365971] env[61852]: DEBUG nova.policy [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a9ae5c8b7ad2472fbc3e4042b17c6656', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'efc7a7bde6804937bed8a5ac6fe5b4ed', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 1039.568733] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293377, 'name': CreateVM_Task, 'duration_secs': 0.412409} completed successfully. 
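Annotation: the recurring "Acquiring lock ... / Lock ... acquired by ... :: waited Ns / released ... :: held Ns" triples above are emitted by oslo.concurrency. A sketch of the two usual call shapes, with lock names copied from this log and hypothetical bodies:

    from oslo_concurrency import lockutils

    with lockutils.lock('compute_resources'):
        pass  # e.g. ResourceTracker.instance_claim runs under this lock

    @lockutils.synchronized('refresh_cache-d8baa4c3-7da1-450c-8bef-336fbb34ceab')
    def refresh_network_cache():
        pass  # refresh the instance's network info cache
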
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.569072] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1039.569567] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1039.569735] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.570077] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1039.570337] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-763c4a76-73e7-4195-96e2-44eac8b04888 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.575089] env[61852]: DEBUG oslo_vmware.api [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1039.575089] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5289ad7b-4dc1-b45c-b352-7a489a881ae1" [ 1039.575089] env[61852]: _type = "Task" [ 1039.575089] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.582348] env[61852]: DEBUG oslo_vmware.api [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5289ad7b-4dc1-b45c-b352-7a489a881ae1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.614213] env[61852]: DEBUG oslo_vmware.api [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': task-1293378, 'name': PowerOnVM_Task, 'duration_secs': 0.396873} completed successfully. 
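Annotation: the "Invoking <Managed Object>.<Method> with opID=..." and "Task: {...} progress is N%" lines are the oslo.vmware session at work. A rough sketch of that client-side flow, assuming the documented oslo.vmware API; host and credentials are placeholders and a live vCenter would be required:

    from oslo_vmware import api as vmware_api

    def make_session(host, user, password):
        # Opens a SOAP session like the one in this log.
        return vmware_api.VMwareAPISession(host, user, password,
                                           api_retry_count=10,
                                           task_poll_interval=0.5)

    # invoke_api() issues the SOAP call and returns a task moref;
    # wait_for_task() polls it, producing the "_poll_task" lines above.
    # task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # session.wait_for_task(task)
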
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.614510] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1039.617101] env[61852]: DEBUG nova.compute.manager [None req-5e7fe508-cd06-44c1-9082-86612ef07794 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1039.617878] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ffdb847-f639-44a1-9fc5-455a8a957d9e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.634065] env[61852]: DEBUG nova.network.neutron [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Successfully created port: eda7c31e-423a-44d1-9dd3-33ec75a78df2 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1039.823873] env[61852]: DEBUG nova.compute.manager [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Start building block device mappings for instance. 
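Annotation: the "Powered on the VM" followed by "Checking state" pair above reflects Nova folding vSphere's runtime.powerState strings into its own power-state constants. A hedged sketch of that mapping; the integer values mirror nova.compute.power_state and the mapping is my reading of the vmwareapi driver, not quoted from it:

    RUNNING, SHUTDOWN, SUSPENDED = 1, 4, 7   # assumed nova constants

    VMWARE_POWER_STATES = {
        'poweredOn': RUNNING,
        'poweredOff': SHUTDOWN,
        'suspended': SUSPENDED,
    }

    def get_power_state(vsphere_state):
        return VMWARE_POWER_STATES[vsphere_state]
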
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1039.904619] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c139456c-1bf6-41ef-b08d-a106e6128554 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.912171] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52544f3e-0d3a-4858-b633-48e32c3d8071 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.947350] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e0a29bd-c0b2-4661-a366-dced1bb1e3b8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.955241] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e20a5a0-2c78-4cc2-9fdc-b70da1685659 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.968266] env[61852]: DEBUG nova.compute.provider_tree [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1040.042499] env[61852]: DEBUG nova.network.neutron [req-23dd86e8-51c3-42be-a8ff-c430c4be5824 req-0d3e69c2-20e9-4703-b176-9d3869383612 service nova] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Updated VIF entry in instance network info cache for port 537951d4-2e0a-45fd-a9eb-39ddf930b39d. 
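Annotation: event names in these entries, such as network-changed-537951d4-2e0a-45fd-a9eb-39ddf930b39d, are displayed as "<event>-<port UUID>". A tiny illustrative decomposition of that display form (the helper is hypothetical; Neutron actually sends name and tag as separate fields):

    def split_event(display_name):
        # a canonical UUID is 36 characters, preceded by a hyphen
        return display_name[:-37], display_name[-36:]

    event, port_id = split_event(
        'network-changed-537951d4-2e0a-45fd-a9eb-39ddf930b39d')
    assert event == 'network-changed'
    assert port_id == '537951d4-2e0a-45fd-a9eb-39ddf930b39d'
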
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1040.042864] env[61852]: DEBUG nova.network.neutron [req-23dd86e8-51c3-42be-a8ff-c430c4be5824 req-0d3e69c2-20e9-4703-b176-9d3869383612 service nova] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Updating instance_info_cache with network_info: [{"id": "537951d4-2e0a-45fd-a9eb-39ddf930b39d", "address": "fa:16:3e:bd:ee:7f", "network": {"id": "07c444d7-03d4-406b-bb66-de44a92b43d6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-818710190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a10be4b0f16c432c87b39b211fbf2fee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0c293d47-74c0-49d7-a474-cdb643080f6f", "external-id": "nsx-vlan-transportzone-172", "segmentation_id": 172, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap537951d4-2e", "ovs_interfaceid": "537951d4-2e0a-45fd-a9eb-39ddf930b39d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.084541] env[61852]: DEBUG oslo_vmware.api [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5289ad7b-4dc1-b45c-b352-7a489a881ae1, 'name': SearchDatastore_Task, 'duration_secs': 0.014614} completed successfully. 
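Annotation: the instance_info_cache payload above is plain nested JSON. A sketch of pulling the fixed IPs out of it, with the structure abridged from the log entry (only the keys needed here are kept):

    vif = {
        "id": "537951d4-2e0a-45fd-a9eb-39ddf930b39d",
        "address": "fa:16:3e:bd:ee:7f",
        "network": {"subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.12", "type": "fixed"}],
        }]},
    }

    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
    assert fixed_ips == ["192.168.128.12"]
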
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.085079] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1040.085383] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1040.085618] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1040.085771] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.085959] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1040.086229] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae53f928-1ebf-4916-984f-33be9118cc40 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.093965] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1040.094180] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Folder [datastore1] devstack-image-cache_base created. 
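Annotation: the lock taken on the cached VMDK path above serializes concurrent builds that need the same image, so the image is fetched into devstack-image-cache_base at most once and copied thereafter. A sketch of that copy-if-missing discipline under stated assumptions (datastore_search and fetch_image are hypothetical helpers standing in for SearchDatastore_Task and the Glance download):

    from oslo_concurrency import lockutils

    image_id = '90fd8f39-16b3-43e0-a682-0ec131005e31'
    cache_vmdk = ('[datastore1] devstack-image-cache_base/'
                  f'{image_id}/{image_id}.vmdk')

    def ensure_cached(datastore_search, fetch_image):
        with lockutils.lock(cache_vmdk):        # path doubles as lock name
            if not datastore_search(cache_vmdk):
                fetch_image(cache_vmdk)         # download once, reuse after
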
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1040.094832] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2aa1ab45-85d9-4f13-8c1d-edefd1de7147 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.099400] env[61852]: DEBUG oslo_vmware.api [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1040.099400] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52031b8b-05d0-1552-a215-5060768cba38" [ 1040.099400] env[61852]: _type = "Task" [ 1040.099400] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.106557] env[61852]: DEBUG oslo_vmware.api [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52031b8b-05d0-1552-a215-5060768cba38, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.332162] env[61852]: INFO nova.virt.block_device [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Booting with volume 6525f8b8-0d76-4718-8cf5-d506fa05ddde at /dev/sda [ 1040.367749] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-24d1995a-d3cc-4c8f-9177-9daf48145223 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.377127] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81d43fbd-b609-41bf-8430-183f282c821d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.400062] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6febc568-de04-4c58-9267-7b47cb9ce153 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.407770] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31702369-8595-47ba-a612-5f1ae4e4ca2a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.430855] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32b4f216-80aa-4bcf-ac40-fa6167cb496c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.437071] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8d667c5-8c73-4ff6-bd79-2ac573cf0032 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.451691] env[61852]: DEBUG nova.virt.block_device [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Updating existing volume 
attachment record: 004d385d-37b2-4ffd-bc18-ada5ff8aad4e {{(pid=61852) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1040.470747] env[61852]: DEBUG nova.scheduler.client.report [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1040.545762] env[61852]: DEBUG oslo_concurrency.lockutils [req-23dd86e8-51c3-42be-a8ff-c430c4be5824 req-0d3e69c2-20e9-4703-b176-9d3869383612 service nova] Releasing lock "refresh_cache-d8baa4c3-7da1-450c-8bef-336fbb34ceab" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1040.610503] env[61852]: DEBUG oslo_vmware.api [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52031b8b-05d0-1552-a215-5060768cba38, 'name': SearchDatastore_Task, 'duration_secs': 0.007146} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.611293] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad60323b-4062-49f0-9c72-46628d23ae07 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.616142] env[61852]: DEBUG oslo_vmware.api [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1040.616142] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]525e506b-fe14-3144-083b-445c6cf9606b" [ 1040.616142] env[61852]: _type = "Task" [ 1040.616142] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.623288] env[61852]: DEBUG oslo_vmware.api [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]525e506b-fe14-3144-083b-445c6cf9606b, 'name': SearchDatastore_Task} progress is 0%. 
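Annotation: "Booting with volume 6525f8b8-0d76-4718-8cf5-d506fa05ddde at /dev/sda" above is the boot-from-volume path. A representative block_device_mapping_v2 entry for such a server-create request (volume UUID taken from the log; the rest is the generic compute-API shape, not necessarily the test's exact body):

    bdm_v2 = [{
        "boot_index": 0,
        "uuid": "6525f8b8-0d76-4718-8cf5-d506fa05ddde",
        "source_type": "volume",
        "destination_type": "volume",
        "device_name": "/dev/sda",
        "delete_on_termination": False,
    }]
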
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.896405] env[61852]: DEBUG nova.compute.manager [req-2546398a-fcd8-49d9-bdaa-8038c66bbd6f req-8e00afb8-4607-455c-8e83-f3cfce6c1ccd service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Received event network-changed-9cbc23d9-a543-40b1-ad2b-389d5ebe78be {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1040.896570] env[61852]: DEBUG nova.compute.manager [req-2546398a-fcd8-49d9-bdaa-8038c66bbd6f req-8e00afb8-4607-455c-8e83-f3cfce6c1ccd service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Refreshing instance network info cache due to event network-changed-9cbc23d9-a543-40b1-ad2b-389d5ebe78be. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1040.896784] env[61852]: DEBUG oslo_concurrency.lockutils [req-2546398a-fcd8-49d9-bdaa-8038c66bbd6f req-8e00afb8-4607-455c-8e83-f3cfce6c1ccd service nova] Acquiring lock "refresh_cache-7601ebe5-ff7a-4bdf-b64a-a5b2de069bca" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1040.896928] env[61852]: DEBUG oslo_concurrency.lockutils [req-2546398a-fcd8-49d9-bdaa-8038c66bbd6f req-8e00afb8-4607-455c-8e83-f3cfce6c1ccd service nova] Acquired lock "refresh_cache-7601ebe5-ff7a-4bdf-b64a-a5b2de069bca" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.897103] env[61852]: DEBUG nova.network.neutron [req-2546398a-fcd8-49d9-bdaa-8038c66bbd6f req-8e00afb8-4607-455c-8e83-f3cfce6c1ccd service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Refreshing network info cache for port 9cbc23d9-a543-40b1-ad2b-389d5ebe78be {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1040.975793] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.163s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.976409] env[61852]: DEBUG nova.compute.manager [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Start building networks asynchronously for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1040.979304] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 2.941s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.979387] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.979573] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61852) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1040.980741] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c5fcf69-bbf5-4d61-95cc-93681a315217 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.988894] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51dfe0b3-e378-46ff-807c-f7c62705bc09 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.005094] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c8e939f-ae45-4168-8754-a68e6b13c757 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.013330] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3f3bc4f-122f-4403-85a5-986173c1e530 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.045644] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181227MB free_disk=139GB free_vcpus=48 pci_devices=None {{(pid=61852) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1041.045799] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.046014] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.100791] env[61852]: DEBUG nova.compute.manager [req-a84cc912-10d9-4149-bb7a-6e1326c1b283 req-697c9cbb-90d9-44c3-be9c-b8720ca23175 service nova] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Received event network-vif-plugged-eda7c31e-423a-44d1-9dd3-33ec75a78df2 {{(pid=61852) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1041.101026] env[61852]: DEBUG oslo_concurrency.lockutils [req-a84cc912-10d9-4149-bb7a-6e1326c1b283 req-697c9cbb-90d9-44c3-be9c-b8720ca23175 service nova] Acquiring lock "25892f5e-147c-49b7-8009-60755a82a840-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.101238] env[61852]: DEBUG oslo_concurrency.lockutils [req-a84cc912-10d9-4149-bb7a-6e1326c1b283 req-697c9cbb-90d9-44c3-be9c-b8720ca23175 service nova] Lock "25892f5e-147c-49b7-8009-60755a82a840-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.101414] env[61852]: DEBUG oslo_concurrency.lockutils [req-a84cc912-10d9-4149-bb7a-6e1326c1b283 req-697c9cbb-90d9-44c3-be9c-b8720ca23175 service nova] Lock "25892f5e-147c-49b7-8009-60755a82a840-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.101579] env[61852]: DEBUG nova.compute.manager [req-a84cc912-10d9-4149-bb7a-6e1326c1b283 req-697c9cbb-90d9-44c3-be9c-b8720ca23175 service nova] [instance: 25892f5e-147c-49b7-8009-60755a82a840] No waiting events found dispatching network-vif-plugged-eda7c31e-423a-44d1-9dd3-33ec75a78df2 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1041.101744] env[61852]: WARNING nova.compute.manager [req-a84cc912-10d9-4149-bb7a-6e1326c1b283 req-697c9cbb-90d9-44c3-be9c-b8720ca23175 service nova] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Received unexpected event network-vif-plugged-eda7c31e-423a-44d1-9dd3-33ec75a78df2 for instance with vm_state building and task_state block_device_mapping. [ 1041.126321] env[61852]: DEBUG oslo_vmware.api [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]525e506b-fe14-3144-083b-445c6cf9606b, 'name': SearchDatastore_Task, 'duration_secs': 0.009538} completed successfully. 
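Annotation: the WARNING "Received unexpected event network-vif-plugged-... " above means the event arrived before the compute manager had registered a waiter for it, so there was nothing to wake ("No waiting events found"). A toy model of that registry race, purely illustrative:

    import threading

    waiters = {}   # (instance_uuid, event_name) -> threading.Event

    def prepare(instance_uuid, event_name):
        waiters[(instance_uuid, event_name)] = threading.Event()

    def dispatch(instance_uuid, event_name):
        ev = waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            print('WARNING: unexpected event', event_name)  # the log's case
        else:
            ev.set()   # wakes the thread blocked on ev.wait()
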
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.126576] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1041.126821] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] d8baa4c3-7da1-450c-8bef-336fbb34ceab/d8baa4c3-7da1-450c-8bef-336fbb34ceab.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1041.127083] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6a3de08a-3895-4b15-a732-db3d25eed3c3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.133008] env[61852]: DEBUG oslo_vmware.api [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1041.133008] env[61852]: value = "task-1293379" [ 1041.133008] env[61852]: _type = "Task" [ 1041.133008] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.140657] env[61852]: DEBUG oslo_vmware.api [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293379, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.482719] env[61852]: DEBUG nova.compute.utils [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1041.483544] env[61852]: DEBUG nova.compute.manager [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Allocating IP information in the background. 
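Annotation: "Using /dev/sd instead of None" above means no device-name prefix was requested, so get_next_device_name falls back to the /dev/sd default and picks the next free letter. A simplified sketch of that selection (not Nova's exact implementation):

    import string

    def next_device_name(used, prefix='/dev/sd'):
        for letter in string.ascii_lowercase:
            candidate = prefix + letter
            if candidate not in used:
                return candidate
        raise ValueError('no free device names')

    assert next_device_name({'/dev/sda'}) == '/dev/sdb'
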
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1041.483724] env[61852]: DEBUG nova.network.neutron [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1041.580309] env[61852]: DEBUG nova.policy [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '81c41a76b275406c83c80068659e2b04', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3b019fd876c14428bd8f2de5fa66da4d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 1041.646176] env[61852]: DEBUG oslo_vmware.api [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293379, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.671325] env[61852]: DEBUG nova.network.neutron [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Successfully updated port: eda7c31e-423a-44d1-9dd3-33ec75a78df2 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1041.699121] env[61852]: DEBUG nova.compute.manager [req-f641a8d5-0bb9-4394-a261-8cbe0c27bd76 req-c975d2b6-27d3-4009-ba4f-06cd221cae40 service nova] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Received event network-changed-eda7c31e-423a-44d1-9dd3-33ec75a78df2 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1041.699353] env[61852]: DEBUG nova.compute.manager [req-f641a8d5-0bb9-4394-a261-8cbe0c27bd76 req-c975d2b6-27d3-4009-ba4f-06cd221cae40 service nova] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Refreshing instance network info cache due to event network-changed-eda7c31e-423a-44d1-9dd3-33ec75a78df2. 
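Annotation: the "Policy check for network:attach_external_network failed" lines are expected for these non-admin tempest credentials. A self-contained sketch of the oslo.policy check shape, registering the rule inline with an assumed role:admin default (credentials abridged from the log entry above):

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.ConfigOpts())
    enforcer.register_default(policy.RuleDefault(
        'network:attach_external_network', 'role:admin'))

    creds = {'roles': ['reader', 'member'],
             'project_id': '3b019fd876c14428bd8f2de5fa66da4d'}
    assert not enforcer.authorize('network:attach_external_network',
                                  {}, creds, do_raise=False)
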
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1041.699577] env[61852]: DEBUG oslo_concurrency.lockutils [req-f641a8d5-0bb9-4394-a261-8cbe0c27bd76 req-c975d2b6-27d3-4009-ba4f-06cd221cae40 service nova] Acquiring lock "refresh_cache-25892f5e-147c-49b7-8009-60755a82a840" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1041.699723] env[61852]: DEBUG oslo_concurrency.lockutils [req-f641a8d5-0bb9-4394-a261-8cbe0c27bd76 req-c975d2b6-27d3-4009-ba4f-06cd221cae40 service nova] Acquired lock "refresh_cache-25892f5e-147c-49b7-8009-60755a82a840" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.699891] env[61852]: DEBUG nova.network.neutron [req-f641a8d5-0bb9-4394-a261-8cbe0c27bd76 req-c975d2b6-27d3-4009-ba4f-06cd221cae40 service nova] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Refreshing network info cache for port eda7c31e-423a-44d1-9dd3-33ec75a78df2 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1041.716647] env[61852]: DEBUG nova.network.neutron [req-2546398a-fcd8-49d9-bdaa-8038c66bbd6f req-8e00afb8-4607-455c-8e83-f3cfce6c1ccd service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Updated VIF entry in instance network info cache for port 9cbc23d9-a543-40b1-ad2b-389d5ebe78be. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1041.716992] env[61852]: DEBUG nova.network.neutron [req-2546398a-fcd8-49d9-bdaa-8038c66bbd6f req-8e00afb8-4607-455c-8e83-f3cfce6c1ccd service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Updating instance_info_cache with network_info: [{"id": "9cbc23d9-a543-40b1-ad2b-389d5ebe78be", "address": "fa:16:3e:25:d9:2f", "network": {"id": "15708ffb-fab5-4bb1-b3c6-48dfd8fba2dd", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-334319416-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "cda1365a8c014771b0627254d322c3bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cbc23d9-a5", "ovs_interfaceid": "9cbc23d9-a543-40b1-ad2b-389d5ebe78be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.920121] env[61852]: DEBUG nova.network.neutron [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Successfully created port: 749e06f0-8fbc-42b0-bbf4-95d75f6733d1 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1041.990814] env[61852]: DEBUG nova.compute.manager [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 
tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1042.070983] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1042.071162] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance d8baa4c3-7da1-450c-8bef-336fbb34ceab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1042.071286] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 25892f5e-147c-49b7-8009-60755a82a840 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1042.071403] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance ad917577-5285-4f8d-8096-d83424deba33 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1042.071574] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=61852) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1042.071707] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=61852) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1042.150268] env[61852]: DEBUG oslo_vmware.api [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293379, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.521002} completed successfully. 
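Annotation: the "Final resource view" numbers above can be re-derived from the four per-instance allocations listed just before it, plus the 512MB reserved RAM from the inventory reports; note the boot-from-volume instance (25892f5e-...) carries no DISK_GB allocation, which is why used_disk is 3GB for four instances:

    allocations = [
        {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},   # 7601ebe5-...
        {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},   # d8baa4c3-...
        {'MEMORY_MB': 192, 'VCPU': 1},                 # 25892f5e-... (BFV)
        {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},   # ad917577-...
    ]
    used_ram = 512 + sum(a['MEMORY_MB'] for a in allocations)    # reserved + claims
    used_disk = sum(a.get('DISK_GB', 0) for a in allocations)
    used_vcpus = sum(a['VCPU'] for a in allocations)
    assert (used_ram, used_disk, used_vcpus) == (1280, 3, 4)     # matches the log
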
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.150472] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] d8baa4c3-7da1-450c-8bef-336fbb34ceab/d8baa4c3-7da1-450c-8bef-336fbb34ceab.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1042.150678] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1042.150933] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2596f332-8201-4f5d-8c10-142e1f9ee321 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.155410] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-453af971-dcab-45b8-9450-3849998c7f8d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.158828] env[61852]: DEBUG oslo_vmware.api [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1042.158828] env[61852]: value = "task-1293380" [ 1042.158828] env[61852]: _type = "Task" [ 1042.158828] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.164751] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78fca804-6048-4f36-94e7-232f938ab402 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.172362] env[61852]: DEBUG oslo_vmware.api [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293380, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.175093] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Acquiring lock "refresh_cache-25892f5e-147c-49b7-8009-60755a82a840" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1042.199536] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d74799f3-b18c-4ceb-a841-a4e5f22e3681 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.209149] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dbc59e7-3b41-4502-bca4-9effab1df6e1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.222046] env[61852]: DEBUG oslo_concurrency.lockutils [req-2546398a-fcd8-49d9-bdaa-8038c66bbd6f req-8e00afb8-4607-455c-8e83-f3cfce6c1ccd service nova] Releasing lock "refresh_cache-7601ebe5-ff7a-4bdf-b64a-a5b2de069bca" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1042.222526] env[61852]: DEBUG nova.compute.provider_tree [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1042.233496] env[61852]: DEBUG nova.network.neutron [req-f641a8d5-0bb9-4394-a261-8cbe0c27bd76 req-c975d2b6-27d3-4009-ba4f-06cd221cae40 service nova] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1042.306743] env[61852]: DEBUG nova.network.neutron [req-f641a8d5-0bb9-4394-a261-8cbe0c27bd76 req-c975d2b6-27d3-4009-ba4f-06cd221cae40 service nova] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.562339] env[61852]: DEBUG nova.compute.manager [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Start spawning the instance on the hypervisor. 
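Annotation: the hardware-topology entries that follow ("Build topologies for 1 vcpu(s) 1:1:1", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") enumerate factorizations of the vCPU count under the flavor/image limits. A simplified version of that search; with no preferences (0:0:0) and 65536-wide limits, 1 vCPU admits only (1, 1, 1):

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        yield (s, c, t)

    assert list(possible_topologies(1)) == [(1, 1, 1)]
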
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1042.563289] env[61852]: DEBUG nova.virt.hardware [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1042.563289] env[61852]: DEBUG nova.virt.hardware [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1042.563429] env[61852]: DEBUG nova.virt.hardware [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1042.563499] env[61852]: DEBUG nova.virt.hardware [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1042.563650] env[61852]: DEBUG nova.virt.hardware [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1042.563798] env[61852]: DEBUG nova.virt.hardware [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1042.564010] env[61852]: DEBUG nova.virt.hardware [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1042.564185] env[61852]: DEBUG nova.virt.hardware [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1042.564361] env[61852]: DEBUG nova.virt.hardware [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 
tempest-ServersTestBootFromVolume-1227508965-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1042.564530] env[61852]: DEBUG nova.virt.hardware [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1042.564761] env[61852]: DEBUG nova.virt.hardware [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1042.565886] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f53b28ff-c8be-4bbf-b4df-39b0fd80b385 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.575195] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-692440e0-294c-44cf-8a64-e89ed272adde {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.668367] env[61852]: DEBUG oslo_vmware.api [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293380, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068255} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.668656] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1042.669403] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-612a1c9d-ea7c-4554-b899-f911b6fdda22 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.694078] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] d8baa4c3-7da1-450c-8bef-336fbb34ceab/d8baa4c3-7da1-450c-8bef-336fbb34ceab.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1042.694382] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3b6b8e2-8ad7-433e-92af-1d9adb2ceff8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.712843] env[61852]: DEBUG oslo_vmware.api [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1042.712843] env[61852]: value = "task-1293381" [ 
1042.712843] env[61852]: _type = "Task" [ 1042.712843] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.720297] env[61852]: DEBUG oslo_vmware.api [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293381, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.726201] env[61852]: DEBUG nova.scheduler.client.report [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1042.809577] env[61852]: DEBUG oslo_concurrency.lockutils [req-f641a8d5-0bb9-4394-a261-8cbe0c27bd76 req-c975d2b6-27d3-4009-ba4f-06cd221cae40 service nova] Releasing lock "refresh_cache-25892f5e-147c-49b7-8009-60755a82a840" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1042.809954] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Acquired lock "refresh_cache-25892f5e-147c-49b7-8009-60755a82a840" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.810135] env[61852]: DEBUG nova.network.neutron [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1043.001859] env[61852]: DEBUG nova.compute.manager [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Start spawning the instance on the hypervisor. 
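Annotation: for the inventory dict reported above, placement's effective capacity per resource class follows the standard formula capacity = (total - reserved) * allocation_ratio, with max_unit capping any single allocation. Worked with the logged values (a sketch; irrelevant inventory fields omitted):

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }
    caps = {rc: int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
            for rc, inv in inventory.items()}
    assert caps == {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}
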
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1043.027689] env[61852]: DEBUG nova.virt.hardware [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1043.027914] env[61852]: DEBUG nova.virt.hardware [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1043.028081] env[61852]: DEBUG nova.virt.hardware [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1043.028273] env[61852]: DEBUG nova.virt.hardware [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1043.028520] env[61852]: DEBUG nova.virt.hardware [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1043.028758] env[61852]: DEBUG nova.virt.hardware [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1043.029753] env[61852]: DEBUG nova.virt.hardware [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1043.029753] env[61852]: DEBUG nova.virt.hardware [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1043.029753] 
env[61852]: DEBUG nova.virt.hardware [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1043.029753] env[61852]: DEBUG nova.virt.hardware [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1043.029753] env[61852]: DEBUG nova.virt.hardware [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1043.030567] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9b11cc6-3277-4450-bdfd-dd72f2c2f343 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.038332] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb802afd-3d77-47fb-908f-5388fe2ee1c3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.168114] env[61852]: DEBUG nova.compute.manager [req-7976ffa0-0bde-4124-801e-820a0db660a9 req-ec72f224-02dd-46ac-8bb8-0ae68836fb35 service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Received event network-changed-9cbc23d9-a543-40b1-ad2b-389d5ebe78be {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1043.168114] env[61852]: DEBUG nova.compute.manager [req-7976ffa0-0bde-4124-801e-820a0db660a9 req-ec72f224-02dd-46ac-8bb8-0ae68836fb35 service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Refreshing instance network info cache due to event network-changed-9cbc23d9-a543-40b1-ad2b-389d5ebe78be. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1043.168114] env[61852]: DEBUG oslo_concurrency.lockutils [req-7976ffa0-0bde-4124-801e-820a0db660a9 req-ec72f224-02dd-46ac-8bb8-0ae68836fb35 service nova] Acquiring lock "refresh_cache-7601ebe5-ff7a-4bdf-b64a-a5b2de069bca" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1043.168114] env[61852]: DEBUG oslo_concurrency.lockutils [req-7976ffa0-0bde-4124-801e-820a0db660a9 req-ec72f224-02dd-46ac-8bb8-0ae68836fb35 service nova] Acquired lock "refresh_cache-7601ebe5-ff7a-4bdf-b64a-a5b2de069bca" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.168114] env[61852]: DEBUG nova.network.neutron [req-7976ffa0-0bde-4124-801e-820a0db660a9 req-ec72f224-02dd-46ac-8bb8-0ae68836fb35 service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Refreshing network info cache for port 9cbc23d9-a543-40b1-ad2b-389d5ebe78be {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1043.222166] env[61852]: DEBUG oslo_vmware.api [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293381, 'name': ReconfigVM_Task, 'duration_secs': 0.321024} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.222619] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Reconfigured VM instance instance-00000065 to attach disk [datastore1] d8baa4c3-7da1-450c-8bef-336fbb34ceab/d8baa4c3-7da1-450c-8bef-336fbb34ceab.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1043.223450] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-35fbd3e6-6597-4943-9327-2e4491f5c543 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.230492] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61852) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1043.230814] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.185s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.231282] env[61852]: DEBUG oslo_vmware.api [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1043.231282] env[61852]: value = "task-1293382" [ 1043.231282] env[61852]: _type = "Task" [ 1043.231282] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.240163] env[61852]: DEBUG oslo_vmware.api [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293382, 'name': Rename_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.359017] env[61852]: DEBUG nova.network.neutron [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1043.493172] env[61852]: DEBUG nova.network.neutron [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Successfully updated port: 749e06f0-8fbc-42b0-bbf4-95d75f6733d1 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1043.536701] env[61852]: DEBUG nova.network.neutron [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Updating instance_info_cache with network_info: [{"id": "eda7c31e-423a-44d1-9dd3-33ec75a78df2", "address": "fa:16:3e:23:ed:e6", "network": {"id": "125cc6de-2911-46cf-b90c-f7b31329f0c5", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1290224231-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efc7a7bde6804937bed8a5ac6fe5b4ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60e7ee7b-4d02-4d68-af2e-5ab7d9708120", "external-id": "nsx-vlan-transportzone-550", "segmentation_id": 550, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeda7c31e-42", "ovs_interfaceid": "eda7c31e-423a-44d1-9dd3-33ec75a78df2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1043.743564] env[61852]: DEBUG oslo_vmware.api [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293382, 'name': Rename_Task, 'duration_secs': 0.200034} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.743564] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1043.743564] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-391b966f-052e-46fe-b5e9-bbf8a715dfef {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.746251] env[61852]: DEBUG nova.compute.manager [req-2c7072cd-1269-44a4-b6e9-01c5857b6cc7 req-f91e487f-10b7-4241-b8da-4b52f53b1045 service nova] [instance: ad917577-5285-4f8d-8096-d83424deba33] Received event network-vif-plugged-749e06f0-8fbc-42b0-bbf4-95d75f6733d1 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1043.746450] env[61852]: DEBUG oslo_concurrency.lockutils [req-2c7072cd-1269-44a4-b6e9-01c5857b6cc7 req-f91e487f-10b7-4241-b8da-4b52f53b1045 service nova] Acquiring lock "ad917577-5285-4f8d-8096-d83424deba33-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.746731] env[61852]: DEBUG oslo_concurrency.lockutils [req-2c7072cd-1269-44a4-b6e9-01c5857b6cc7 req-f91e487f-10b7-4241-b8da-4b52f53b1045 service nova] Lock "ad917577-5285-4f8d-8096-d83424deba33-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.746906] env[61852]: DEBUG oslo_concurrency.lockutils [req-2c7072cd-1269-44a4-b6e9-01c5857b6cc7 req-f91e487f-10b7-4241-b8da-4b52f53b1045 service nova] Lock "ad917577-5285-4f8d-8096-d83424deba33-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.747106] env[61852]: DEBUG nova.compute.manager [req-2c7072cd-1269-44a4-b6e9-01c5857b6cc7 req-f91e487f-10b7-4241-b8da-4b52f53b1045 service nova] [instance: ad917577-5285-4f8d-8096-d83424deba33] No waiting events found dispatching network-vif-plugged-749e06f0-8fbc-42b0-bbf4-95d75f6733d1 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1043.747989] env[61852]: WARNING nova.compute.manager [req-2c7072cd-1269-44a4-b6e9-01c5857b6cc7 req-f91e487f-10b7-4241-b8da-4b52f53b1045 service nova] [instance: ad917577-5285-4f8d-8096-d83424deba33] Received unexpected event network-vif-plugged-749e06f0-8fbc-42b0-bbf4-95d75f6733d1 for instance with vm_state building and task_state spawning. 
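The records above capture Nova's external-event handshake: the network-vif-plugged event for port 749e06f0 arrives from Neutron while instance ad917577 is still spawning, the per-instance "-events" lock is taken, no registered waiter is found, and the event is logged as unexpected. A minimal sketch of that waiter/dispatch pattern, using illustrative names (EventRegistry, prepare, dispatch) rather than Nova's actual classes:

import threading

class EventRegistry:
    """Illustrative stand-in for per-instance external-event bookkeeping."""

    def __init__(self):
        # Plays the role of the "<uuid>-events" lock in the log above.
        self._lock = threading.Lock()
        self._waiters = {}  # (instance_uuid, event_key) -> threading.Event

    def prepare(self, instance_uuid, event_key):
        # The spawning thread registers interest *before* plugging the VIF,
        # then blocks on the returned Event with a timeout.
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_key)] = waiter
        return waiter

    def dispatch(self, instance_uuid, event_key):
        # Called when the external event (network-vif-plugged-...) reaches
        # the compute service.
        with self._lock:
            waiter = self._waiters.pop((instance_uuid, event_key), None)
        if waiter is None:
            # Nobody registered yet: mirrors "No waiting events found
            # dispatching ..." followed by the "unexpected event" WARNING.
            print("unexpected event %s for %s" % (event_key, instance_uuid))
        else:
            waiter.set()

registry = EventRegistry()
# The event races ahead of the spawn, as in the log above:
registry.dispatch("ad917577-5285-4f8d-8096-d83424deba33",
                  "network-vif-plugged-749e06f0")

The sketch simply drops the early event, which is what the WARNING above records; how a later waiter then fares (block until timeout, or fail the build) depends on service configuration such as vif_plugging_is_fatal, which the sketch does not model.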
[ 1043.748249] env[61852]: DEBUG nova.compute.manager [req-2c7072cd-1269-44a4-b6e9-01c5857b6cc7 req-f91e487f-10b7-4241-b8da-4b52f53b1045 service nova] [instance: ad917577-5285-4f8d-8096-d83424deba33] Received event network-changed-749e06f0-8fbc-42b0-bbf4-95d75f6733d1 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1043.748379] env[61852]: DEBUG nova.compute.manager [req-2c7072cd-1269-44a4-b6e9-01c5857b6cc7 req-f91e487f-10b7-4241-b8da-4b52f53b1045 service nova] [instance: ad917577-5285-4f8d-8096-d83424deba33] Refreshing instance network info cache due to event network-changed-749e06f0-8fbc-42b0-bbf4-95d75f6733d1. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1043.748567] env[61852]: DEBUG oslo_concurrency.lockutils [req-2c7072cd-1269-44a4-b6e9-01c5857b6cc7 req-f91e487f-10b7-4241-b8da-4b52f53b1045 service nova] Acquiring lock "refresh_cache-ad917577-5285-4f8d-8096-d83424deba33" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1043.748704] env[61852]: DEBUG oslo_concurrency.lockutils [req-2c7072cd-1269-44a4-b6e9-01c5857b6cc7 req-f91e487f-10b7-4241-b8da-4b52f53b1045 service nova] Acquired lock "refresh_cache-ad917577-5285-4f8d-8096-d83424deba33" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.748856] env[61852]: DEBUG nova.network.neutron [req-2c7072cd-1269-44a4-b6e9-01c5857b6cc7 req-f91e487f-10b7-4241-b8da-4b52f53b1045 service nova] [instance: ad917577-5285-4f8d-8096-d83424deba33] Refreshing network info cache for port 749e06f0-8fbc-42b0-bbf4-95d75f6733d1 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1043.754622] env[61852]: DEBUG oslo_vmware.api [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1043.754622] env[61852]: value = "task-1293383" [ 1043.754622] env[61852]: _type = "Task" [ 1043.754622] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.766990] env[61852]: DEBUG oslo_vmware.api [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293383, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.875273] env[61852]: DEBUG nova.network.neutron [req-7976ffa0-0bde-4124-801e-820a0db660a9 req-ec72f224-02dd-46ac-8bb8-0ae68836fb35 service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Updated VIF entry in instance network info cache for port 9cbc23d9-a543-40b1-ad2b-389d5ebe78be. 
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1043.875540] env[61852]: DEBUG nova.network.neutron [req-7976ffa0-0bde-4124-801e-820a0db660a9 req-ec72f224-02dd-46ac-8bb8-0ae68836fb35 service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Updating instance_info_cache with network_info: [{"id": "9cbc23d9-a543-40b1-ad2b-389d5ebe78be", "address": "fa:16:3e:25:d9:2f", "network": {"id": "15708ffb-fab5-4bb1-b3c6-48dfd8fba2dd", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-334319416-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "cda1365a8c014771b0627254d322c3bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cbc23d9-a5", "ovs_interfaceid": "9cbc23d9-a543-40b1-ad2b-389d5ebe78be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1043.997451] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "refresh_cache-ad917577-5285-4f8d-8096-d83424deba33" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1044.039317] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Releasing lock "refresh_cache-25892f5e-147c-49b7-8009-60755a82a840" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1044.039788] env[61852]: DEBUG nova.compute.manager [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Instance network_info: |[{"id": "eda7c31e-423a-44d1-9dd3-33ec75a78df2", "address": "fa:16:3e:23:ed:e6", "network": {"id": "125cc6de-2911-46cf-b90c-f7b31329f0c5", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1290224231-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efc7a7bde6804937bed8a5ac6fe5b4ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60e7ee7b-4d02-4d68-af2e-5ab7d9708120", "external-id": "nsx-vlan-transportzone-550", "segmentation_id": 550, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapeda7c31e-42", "ovs_interfaceid": "eda7c31e-423a-44d1-9dd3-33ec75a78df2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1044.040165] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:ed:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '60e7ee7b-4d02-4d68-af2e-5ab7d9708120', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eda7c31e-423a-44d1-9dd3-33ec75a78df2', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1044.047959] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Creating folder: Project (efc7a7bde6804937bed8a5ac6fe5b4ed). Parent ref: group-v277280. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1044.048668] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c6a80af3-b970-4cd0-aa84-27969a3f538e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.061823] env[61852]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1044.062014] env[61852]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=61852) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1044.062432] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Folder already exists: Project (efc7a7bde6804937bed8a5ac6fe5b4ed). Parent ref: group-v277280. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1044.062639] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Creating folder: Instances. Parent ref: group-v277416. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1044.062896] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-af4a3e5f-6d2d-4547-99bf-85a98edc29b8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.071371] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Created folder: Instances in parent group-v277416. 
[ 1044.071640] env[61852]: DEBUG oslo.service.loopingcall [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1044.071983] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1044.072095] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-10a3e480-5e72-4dde-8604-46c12b96d071 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.090563] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1044.090563] env[61852]: value = "task-1293386" [ 1044.090563] env[61852]: _type = "Task" [ 1044.090563] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.098211] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293386, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.264721] env[61852]: DEBUG oslo_vmware.api [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293383, 'name': PowerOnVM_Task, 'duration_secs': 0.484087} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.264995] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1044.265224] env[61852]: INFO nova.compute.manager [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Took 7.26 seconds to spawn the instance on the hypervisor. [ 1044.265408] env[61852]: DEBUG nova.compute.manager [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1044.266239] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ec2c7e-ffbb-4e52-810e-56728c175b6b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.288667] env[61852]: DEBUG nova.network.neutron [req-2c7072cd-1269-44a4-b6e9-01c5857b6cc7 req-f91e487f-10b7-4241-b8da-4b52f53b1045 service nova] [instance: ad917577-5285-4f8d-8096-d83424deba33] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1044.366504] env[61852]: DEBUG nova.network.neutron [req-2c7072cd-1269-44a4-b6e9-01c5857b6cc7 req-f91e487f-10b7-4241-b8da-4b52f53b1045 service nova] [instance: ad917577-5285-4f8d-8096-d83424deba33] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.378577] env[61852]: DEBUG oslo_concurrency.lockutils [req-7976ffa0-0bde-4124-801e-820a0db660a9 req-ec72f224-02dd-46ac-8bb8-0ae68836fb35 service nova] Releasing lock "refresh_cache-7601ebe5-ff7a-4bdf-b64a-a5b2de069bca" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1044.378871] env[61852]: DEBUG nova.compute.manager [req-7976ffa0-0bde-4124-801e-820a0db660a9 req-ec72f224-02dd-46ac-8bb8-0ae68836fb35 service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Received event network-changed-9cbc23d9-a543-40b1-ad2b-389d5ebe78be {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1044.379063] env[61852]: DEBUG nova.compute.manager [req-7976ffa0-0bde-4124-801e-820a0db660a9 req-ec72f224-02dd-46ac-8bb8-0ae68836fb35 service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Refreshing instance network info cache due to event network-changed-9cbc23d9-a543-40b1-ad2b-389d5ebe78be. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1044.379294] env[61852]: DEBUG oslo_concurrency.lockutils [req-7976ffa0-0bde-4124-801e-820a0db660a9 req-ec72f224-02dd-46ac-8bb8-0ae68836fb35 service nova] Acquiring lock "refresh_cache-7601ebe5-ff7a-4bdf-b64a-a5b2de069bca" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1044.379445] env[61852]: DEBUG oslo_concurrency.lockutils [req-7976ffa0-0bde-4124-801e-820a0db660a9 req-ec72f224-02dd-46ac-8bb8-0ae68836fb35 service nova] Acquired lock "refresh_cache-7601ebe5-ff7a-4bdf-b64a-a5b2de069bca" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.379611] env[61852]: DEBUG nova.network.neutron [req-7976ffa0-0bde-4124-801e-820a0db660a9 req-ec72f224-02dd-46ac-8bb8-0ae68836fb35 service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Refreshing network info cache for port 9cbc23d9-a543-40b1-ad2b-389d5ebe78be {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1044.600376] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293386, 'name': CreateVM_Task, 'duration_secs': 0.299978} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.600547] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1044.601204] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'device_type': None, 'delete_on_termination': True, 'guest_format': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-277420', 'volume_id': '6525f8b8-0d76-4718-8cf5-d506fa05ddde', 'name': 'volume-6525f8b8-0d76-4718-8cf5-d506fa05ddde', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '25892f5e-147c-49b7-8009-60755a82a840', 'attached_at': '', 'detached_at': '', 'volume_id': '6525f8b8-0d76-4718-8cf5-d506fa05ddde', 'serial': '6525f8b8-0d76-4718-8cf5-d506fa05ddde'}, 'boot_index': 0, 'mount_device': '/dev/sda', 'attachment_id': '004d385d-37b2-4ffd-bc18-ada5ff8aad4e', 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=61852) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1044.601423] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Root volume attach. 
Driver type: vmdk {{(pid=61852) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1044.602184] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70329116-3295-4b0e-927f-1a5f57d47fe7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.608841] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Acquiring lock "7601ebe5-ff7a-4bdf-b64a-a5b2de069bca" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.609220] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Lock "7601ebe5-ff7a-4bdf-b64a-a5b2de069bca" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1044.609452] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Acquiring lock "7601ebe5-ff7a-4bdf-b64a-a5b2de069bca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.609648] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Lock "7601ebe5-ff7a-4bdf-b64a-a5b2de069bca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1044.609824] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Lock "7601ebe5-ff7a-4bdf-b64a-a5b2de069bca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.611954] env[61852]: INFO nova.compute.manager [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Terminating instance [ 1044.613708] env[61852]: DEBUG nova.compute.manager [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Start destroying the instance on the hypervisor. 
{{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1044.613878] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1044.614585] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5079ca2f-7f2e-4478-b4c1-9ab7c069f6a8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.617535] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c70a8c7b-dd6b-4e02-82b1-94861c9b7121 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.624959] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b385f10-3071-4933-a3f9-9db14ce16c72 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.627660] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1044.627913] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1f320f60-06cc-4e96-baf9-2aeaf39bc09c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.632577] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-0474743d-1b98-4892-95e8-09b61696c468 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.635532] env[61852]: DEBUG oslo_vmware.api [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Waiting for the task: (returnval){ [ 1044.635532] env[61852]: value = "task-1293387" [ 1044.635532] env[61852]: _type = "Task" [ 1044.635532] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.641538] env[61852]: DEBUG oslo_vmware.api [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Waiting for the task: (returnval){ [ 1044.641538] env[61852]: value = "task-1293388" [ 1044.641538] env[61852]: _type = "Task" [ 1044.641538] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.647530] env[61852]: DEBUG oslo_vmware.api [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': task-1293387, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.652525] env[61852]: DEBUG oslo_vmware.api [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Task: {'id': task-1293388, 'name': RelocateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.787797] env[61852]: INFO nova.compute.manager [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Took 11.97 seconds to build instance. [ 1044.869227] env[61852]: DEBUG oslo_concurrency.lockutils [req-2c7072cd-1269-44a4-b6e9-01c5857b6cc7 req-f91e487f-10b7-4241-b8da-4b52f53b1045 service nova] Releasing lock "refresh_cache-ad917577-5285-4f8d-8096-d83424deba33" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1044.869703] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquired lock "refresh_cache-ad917577-5285-4f8d-8096-d83424deba33" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.869833] env[61852]: DEBUG nova.network.neutron [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1045.066082] env[61852]: DEBUG nova.network.neutron [req-7976ffa0-0bde-4124-801e-820a0db660a9 req-ec72f224-02dd-46ac-8bb8-0ae68836fb35 service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Updated VIF entry in instance network info cache for port 9cbc23d9-a543-40b1-ad2b-389d5ebe78be. 
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1045.066460] env[61852]: DEBUG nova.network.neutron [req-7976ffa0-0bde-4124-801e-820a0db660a9 req-ec72f224-02dd-46ac-8bb8-0ae68836fb35 service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Updating instance_info_cache with network_info: [{"id": "9cbc23d9-a543-40b1-ad2b-389d5ebe78be", "address": "fa:16:3e:25:d9:2f", "network": {"id": "15708ffb-fab5-4bb1-b3c6-48dfd8fba2dd", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-334319416-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "cda1365a8c014771b0627254d322c3bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cbc23d9-a5", "ovs_interfaceid": "9cbc23d9-a543-40b1-ad2b-389d5ebe78be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.149282] env[61852]: DEBUG oslo_vmware.api [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': task-1293387, 'name': PowerOffVM_Task, 'duration_secs': 0.198091} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.149937] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1045.150269] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1045.150538] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6c658232-2d78-46d2-922e-4dc3bea0f10f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.154726] env[61852]: DEBUG oslo_vmware.api [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Task: {'id': task-1293388, 'name': RelocateVM_Task} progress is 13%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.205828] env[61852]: DEBUG nova.compute.manager [req-2b736d31-7623-4aac-ab8b-50356a5e11a9 req-3cc657c6-a431-4e7d-850e-c255b14952eb service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Received event network-changed-9cbc23d9-a543-40b1-ad2b-389d5ebe78be {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1045.205828] env[61852]: DEBUG nova.compute.manager [req-2b736d31-7623-4aac-ab8b-50356a5e11a9 req-3cc657c6-a431-4e7d-850e-c255b14952eb service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Refreshing instance network info cache due to event network-changed-9cbc23d9-a543-40b1-ad2b-389d5ebe78be. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1045.205828] env[61852]: DEBUG oslo_concurrency.lockutils [req-2b736d31-7623-4aac-ab8b-50356a5e11a9 req-3cc657c6-a431-4e7d-850e-c255b14952eb service nova] Acquiring lock "refresh_cache-7601ebe5-ff7a-4bdf-b64a-a5b2de069bca" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1045.290067] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1469b751-62ce-47a7-9258-fd52b22f94e4 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d8baa4c3-7da1-450c-8bef-336fbb34ceab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 13.486s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1045.400650] env[61852]: DEBUG nova.network.neutron [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1045.423677] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1045.423915] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1045.424117] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Deleting the datastore file [datastore1] 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1045.424395] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eec253b6-6ae5-4034-81cb-e18195dd2627 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.431454] env[61852]: DEBUG oslo_vmware.api [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Waiting for the task: (returnval){ [ 1045.431454] env[61852]: value = "task-1293390" [ 1045.431454] env[61852]: _type = "Task" [ 1045.431454] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.441478] env[61852]: DEBUG oslo_vmware.api [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': task-1293390, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.532793] env[61852]: DEBUG nova.network.neutron [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Updating instance_info_cache with network_info: [{"id": "749e06f0-8fbc-42b0-bbf4-95d75f6733d1", "address": "fa:16:3e:61:5f:8e", "network": {"id": "84f117d3-1eaf-4f99-9240-7342ce499c83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1473985775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b019fd876c14428bd8f2de5fa66da4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap749e06f0-8f", "ovs_interfaceid": "749e06f0-8fbc-42b0-bbf4-95d75f6733d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.569341] env[61852]: DEBUG oslo_concurrency.lockutils [req-7976ffa0-0bde-4124-801e-820a0db660a9 req-ec72f224-02dd-46ac-8bb8-0ae68836fb35 service nova] Releasing lock "refresh_cache-7601ebe5-ff7a-4bdf-b64a-a5b2de069bca" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1045.569893] env[61852]: DEBUG oslo_concurrency.lockutils [req-2b736d31-7623-4aac-ab8b-50356a5e11a9 req-3cc657c6-a431-4e7d-850e-c255b14952eb service nova] Acquired lock "refresh_cache-7601ebe5-ff7a-4bdf-b64a-a5b2de069bca" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.570179] env[61852]: DEBUG nova.network.neutron [req-2b736d31-7623-4aac-ab8b-50356a5e11a9 req-3cc657c6-a431-4e7d-850e-c255b14952eb service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Refreshing network info cache for port 9cbc23d9-a543-40b1-ad2b-389d5ebe78be {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1045.653864] env[61852]: DEBUG oslo_vmware.api [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Task: {'id': task-1293388, 'name': RelocateVM_Task} progress is 35%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.942240] env[61852]: DEBUG oslo_vmware.api [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Task: {'id': task-1293390, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.238052} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.942603] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1045.942644] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1045.943819] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1045.943819] env[61852]: INFO nova.compute.manager [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Took 1.33 seconds to destroy the instance on the hypervisor. [ 1045.943819] env[61852]: DEBUG oslo.service.loopingcall [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1045.943819] env[61852]: DEBUG nova.compute.manager [-] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1045.943819] env[61852]: DEBUG nova.network.neutron [-] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1046.035958] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Releasing lock "refresh_cache-ad917577-5285-4f8d-8096-d83424deba33" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1046.036311] env[61852]: DEBUG nova.compute.manager [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Instance network_info: |[{"id": "749e06f0-8fbc-42b0-bbf4-95d75f6733d1", "address": "fa:16:3e:61:5f:8e", "network": {"id": "84f117d3-1eaf-4f99-9240-7342ce499c83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1473985775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b019fd876c14428bd8f2de5fa66da4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap749e06f0-8f", "ovs_interfaceid": "749e06f0-8fbc-42b0-bbf4-95d75f6733d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1046.036775] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:5f:8e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e23c1d18-c841-49ea-95f3-df5ceac28afd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '749e06f0-8fbc-42b0-bbf4-95d75f6733d1', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1046.044698] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Creating folder: Project (3b019fd876c14428bd8f2de5fa66da4d). Parent ref: group-v277280. 
{{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1046.044991] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3a2b66ac-6d56-4d3b-b474-a4d0f2290b14 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.056492] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Created folder: Project (3b019fd876c14428bd8f2de5fa66da4d) in parent group-v277280. [ 1046.056715] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Creating folder: Instances. Parent ref: group-v277429. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1046.057053] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dc88d6cd-3042-46c7-92be-5fcc312c959f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.068404] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Created folder: Instances in parent group-v277429. [ 1046.068667] env[61852]: DEBUG oslo.service.loopingcall [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1046.068873] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad917577-5285-4f8d-8096-d83424deba33] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1046.069121] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2c7e1459-a69c-4540-9d72-7044bd1a0f35 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.090195] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1046.090195] env[61852]: value = "task-1293393" [ 1046.090195] env[61852]: _type = "Task" [ 1046.090195] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.100970] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293393, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.155453] env[61852]: DEBUG oslo_vmware.api [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Task: {'id': task-1293388, 'name': RelocateVM_Task} progress is 51%. 
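The folder-creation and CreateVM sequence above shows oslo.vmware's two call shapes: plain methods such as Folder.CreateFolder return immediately, while *_Task methods return a task moref that wait_for_task() polls, producing the "progress is N%" lines from api.py:434. A minimal sketch with assumed credentials, recreating the same Project/Instances nesting (the real parent is a VM folder, not the root folder):

    from oslo_vmware import api

    # Hypothetical endpoint and credentials; Nova reads these from nova.conf.
    session = api.VMwareAPISession('vc.example.org', 'admin', 'secret', 10, 0.5)

    root = session.vim.service_content.rootFolder
    project = session.invoke_api(session.vim, 'CreateFolder', root,
                                 name='Project (3b019fd876c14428bd8f2de5fa66da4d)')
    instances = session.invoke_api(session.vim, 'CreateFolder', project,
                                   name='Instances')
    # An asynchronous call such as CreateVM_Task instead returns a task:
    #   task = session.invoke_api(session.vim, 'CreateVM_Task', instances, ...)
    #   session.wait_for_task(task)  # polls until success, raises on task error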
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.287797] env[61852]: DEBUG nova.compute.manager [req-ffe5f930-cc47-47c1-aacf-14462ef8c015 req-e97463e6-c379-430d-89e7-6e49ac2eaab7 service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Received event network-vif-deleted-9cbc23d9-a543-40b1-ad2b-389d5ebe78be {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1046.288078] env[61852]: INFO nova.compute.manager [req-ffe5f930-cc47-47c1-aacf-14462ef8c015 req-e97463e6-c379-430d-89e7-6e49ac2eaab7 service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Neutron deleted interface 9cbc23d9-a543-40b1-ad2b-389d5ebe78be; detaching it from the instance and deleting it from the info cache [ 1046.288411] env[61852]: DEBUG nova.network.neutron [req-ffe5f930-cc47-47c1-aacf-14462ef8c015 req-e97463e6-c379-430d-89e7-6e49ac2eaab7 service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.337332] env[61852]: DEBUG nova.network.neutron [req-2b736d31-7623-4aac-ab8b-50356a5e11a9 req-3cc657c6-a431-4e7d-850e-c255b14952eb service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Updated VIF entry in instance network info cache for port 9cbc23d9-a543-40b1-ad2b-389d5ebe78be. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1046.337744] env[61852]: DEBUG nova.network.neutron [req-2b736d31-7623-4aac-ab8b-50356a5e11a9 req-3cc657c6-a431-4e7d-850e-c255b14952eb service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Updating instance_info_cache with network_info: [{"id": "9cbc23d9-a543-40b1-ad2b-389d5ebe78be", "address": "fa:16:3e:25:d9:2f", "network": {"id": "15708ffb-fab5-4bb1-b3c6-48dfd8fba2dd", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-334319416-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "cda1365a8c014771b0627254d322c3bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cbc23d9-a5", "ovs_interfaceid": "9cbc23d9-a543-40b1-ad2b-389d5ebe78be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.600450] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293393, 'name': CreateVM_Task, 'duration_secs': 0.40433} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.600622] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad917577-5285-4f8d-8096-d83424deba33] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1046.601452] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1046.601622] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.602044] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1046.602334] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f441d8a-c2f5-4a3a-80eb-6f731d84eddc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.607522] env[61852]: DEBUG oslo_vmware.api [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1046.607522] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]525bca3b-833e-b56c-887b-c2a9535a03cd" [ 1046.607522] env[61852]: _type = "Task" [ 1046.607522] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.616148] env[61852]: DEBUG oslo_vmware.api [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]525bca3b-833e-b56c-887b-c2a9535a03cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.654271] env[61852]: DEBUG oslo_vmware.api [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Task: {'id': task-1293388, 'name': RelocateVM_Task} progress is 65%. 
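The Acquiring/Acquired/Releasing lines around "[datastore2] devstack-image-cache_base/90fd8f39-..." are the image-cache guard: work on one cached image is serialized so two concurrent spawns do not fetch or copy the same VMDK twice. The idiom, sketched with oslo.concurrency (already_cached and fetch are hypothetical helpers):

    from oslo_concurrency import lockutils

    _cache = set()

    def already_cached(path):
        return path in _cache

    def fetch(image_id, path):
        _cache.add(path)  # stand-in for the Glance download + datastore upload

    def ensure_cached_image(image_id):
        path = '[datastore2] devstack-image-cache_base/%s' % image_id
        # Same lock name for every request touching this image; unrelated
        # images proceed in parallel.
        with lockutils.lock(path):
            if not already_cached(path):
                fetch(image_id, path)
        return path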
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.762985] env[61852]: DEBUG nova.network.neutron [-] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.792842] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-35bc888f-a002-497b-87d4-7e6172cabb07 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.803496] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60f5b2f9-acd7-4e06-919f-31bc18ce4645 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.831810] env[61852]: DEBUG nova.compute.manager [req-ffe5f930-cc47-47c1-aacf-14462ef8c015 req-e97463e6-c379-430d-89e7-6e49ac2eaab7 service nova] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Detach interface failed, port_id=9cbc23d9-a543-40b1-ad2b-389d5ebe78be, reason: Instance 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca could not be found. {{(pid=61852) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1046.840941] env[61852]: DEBUG oslo_concurrency.lockutils [req-2b736d31-7623-4aac-ab8b-50356a5e11a9 req-3cc657c6-a431-4e7d-850e-c255b14952eb service nova] Releasing lock "refresh_cache-7601ebe5-ff7a-4bdf-b64a-a5b2de069bca" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1046.841300] env[61852]: DEBUG nova.compute.manager [req-2b736d31-7623-4aac-ab8b-50356a5e11a9 req-3cc657c6-a431-4e7d-850e-c255b14952eb service nova] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Received event network-changed-537951d4-2e0a-45fd-a9eb-39ddf930b39d {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1046.841510] env[61852]: DEBUG nova.compute.manager [req-2b736d31-7623-4aac-ab8b-50356a5e11a9 req-3cc657c6-a431-4e7d-850e-c255b14952eb service nova] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Refreshing instance network info cache due to event network-changed-537951d4-2e0a-45fd-a9eb-39ddf930b39d. 
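The "Received event network-changed-..." / "network-vif-deleted-..." entries are Neutron-to-Nova external events; each handler takes the instance's refresh_cache-<uuid> lock and rebuilds the network info cache, which is why the Releasing lock lines follow the cache updates. A sketch of the dispatch shape (the helpers are hypothetical stand-ins for the Neutron API round-trips):

    from oslo_concurrency import lockutils

    def get_instance_nw_info(instance):      # hypothetical Neutron query
        return []

    def save_info_cache(instance, nw_info):  # hypothetical DB write
        pass

    def handle_external_event(event_name, port_id, instance):
        if event_name == 'network-changed':
            with lockutils.lock('refresh_cache-%s' % instance.uuid):
                save_info_cache(instance, get_instance_nw_info(instance))
        elif event_name == 'network-vif-deleted':
            # Drop just the deleted VIF, as the "detaching it from the
            # instance and deleting it from the info cache" line describes.
            with lockutils.lock('refresh_cache-%s' % instance.uuid):
                nw_info = [v for v in get_instance_nw_info(instance)
                           if v['id'] != port_id]
                save_info_cache(instance, nw_info)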
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1046.841758] env[61852]: DEBUG oslo_concurrency.lockutils [req-2b736d31-7623-4aac-ab8b-50356a5e11a9 req-3cc657c6-a431-4e7d-850e-c255b14952eb service nova] Acquiring lock "refresh_cache-d8baa4c3-7da1-450c-8bef-336fbb34ceab" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1046.841909] env[61852]: DEBUG oslo_concurrency.lockutils [req-2b736d31-7623-4aac-ab8b-50356a5e11a9 req-3cc657c6-a431-4e7d-850e-c255b14952eb service nova] Acquired lock "refresh_cache-d8baa4c3-7da1-450c-8bef-336fbb34ceab" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.842123] env[61852]: DEBUG nova.network.neutron [req-2b736d31-7623-4aac-ab8b-50356a5e11a9 req-3cc657c6-a431-4e7d-850e-c255b14952eb service nova] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Refreshing network info cache for port 537951d4-2e0a-45fd-a9eb-39ddf930b39d {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1047.118144] env[61852]: DEBUG oslo_vmware.api [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]525bca3b-833e-b56c-887b-c2a9535a03cd, 'name': SearchDatastore_Task, 'duration_secs': 0.018774} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.118473] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1047.118612] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1047.118858] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1047.119017] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquired lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.119254] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Creating directory with path [datastore2] 
devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1047.119610] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-acc27154-a505-4bcb-aa45-c22493d0af5e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.129395] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1047.129766] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1047.130468] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87e017a9-6d7c-4933-8a6e-e28519b080b9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.136352] env[61852]: DEBUG oslo_vmware.api [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1047.136352] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5228f1b8-5292-61cd-941d-82feeebd63de" [ 1047.136352] env[61852]: _type = "Task" [ 1047.136352] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.144531] env[61852]: DEBUG oslo_vmware.api [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5228f1b8-5292-61cd-941d-82feeebd63de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.152767] env[61852]: DEBUG oslo_vmware.api [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Task: {'id': task-1293388, 'name': RelocateVM_Task} progress is 81%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.265805] env[61852]: INFO nova.compute.manager [-] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Took 1.32 seconds to deallocate network for instance. [ 1047.581025] env[61852]: DEBUG nova.network.neutron [req-2b736d31-7623-4aac-ab8b-50356a5e11a9 req-3cc657c6-a431-4e7d-850e-c255b14952eb service nova] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Updated VIF entry in instance network info cache for port 537951d4-2e0a-45fd-a9eb-39ddf930b39d. 
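The mkdir / "_create_folder_if_missing" entries above are deliberately idempotent: MakeDirectory is issued with createParentDirectories, and an already-existing directory counts as success. A minimal sketch against the oslo.vmware session (dc_ref is assumed to be the Datacenter moref):

    from oslo_vmware import exceptions as vexc

    def mkdir_if_missing(session, ds_path, dc_ref):
        # ds_path e.g. '[datastore2] devstack-image-cache_base'
        try:
            session.invoke_api(session.vim, 'MakeDirectory',
                               session.vim.service_content.fileManager,
                               name=ds_path, datacenter=dc_ref,
                               createParentDirectories=True)
        except vexc.FileAlreadyExistsException:
            pass  # another request won the race; the directory is there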
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1047.581488] env[61852]: DEBUG nova.network.neutron [req-2b736d31-7623-4aac-ab8b-50356a5e11a9 req-3cc657c6-a431-4e7d-850e-c255b14952eb service nova] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Updating instance_info_cache with network_info: [{"id": "537951d4-2e0a-45fd-a9eb-39ddf930b39d", "address": "fa:16:3e:bd:ee:7f", "network": {"id": "07c444d7-03d4-406b-bb66-de44a92b43d6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-818710190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a10be4b0f16c432c87b39b211fbf2fee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0c293d47-74c0-49d7-a474-cdb643080f6f", "external-id": "nsx-vlan-transportzone-172", "segmentation_id": 172, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap537951d4-2e", "ovs_interfaceid": "537951d4-2e0a-45fd-a9eb-39ddf930b39d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1047.649180] env[61852]: DEBUG oslo_vmware.api [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5228f1b8-5292-61cd-941d-82feeebd63de, 'name': SearchDatastore_Task, 'duration_secs': 0.016119} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.652840] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12187db0-3f34-4877-91cc-7fe232527df2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.659824] env[61852]: DEBUG oslo_vmware.api [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Task: {'id': task-1293388, 'name': RelocateVM_Task} progress is 97%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.661160] env[61852]: DEBUG oslo_vmware.api [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1047.661160] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52027c60-2fec-5262-ff08-7ac645d74085" [ 1047.661160] env[61852]: _type = "Task" [ 1047.661160] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.668642] env[61852]: DEBUG oslo_vmware.api [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52027c60-2fec-5262-ff08-7ac645d74085, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.773765] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1047.774108] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1047.774432] env[61852]: DEBUG nova.objects.instance [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Lazy-loading 'resources' on Instance uuid 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1048.087547] env[61852]: DEBUG oslo_concurrency.lockutils [req-2b736d31-7623-4aac-ab8b-50356a5e11a9 req-3cc657c6-a431-4e7d-850e-c255b14952eb service nova] Releasing lock "refresh_cache-d8baa4c3-7da1-450c-8bef-336fbb34ceab" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1048.159354] env[61852]: DEBUG oslo_vmware.api [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Task: {'id': task-1293388, 'name': RelocateVM_Task} progress is 98%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.170203] env[61852]: DEBUG oslo_vmware.api [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52027c60-2fec-5262-ff08-7ac645d74085, 'name': SearchDatastore_Task, 'duration_secs': 0.035059} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.170478] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Releasing lock "[datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1048.170737] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] ad917577-5285-4f8d-8096-d83424deba33/ad917577-5285-4f8d-8096-d83424deba33.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1048.170993] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b94341fb-0736-43a3-8ab3-a59999be95ac {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.177964] env[61852]: DEBUG oslo_vmware.api [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1048.177964] env[61852]: value = "task-1293394" [ 1048.177964] env[61852]: _type = "Task" [ 1048.177964] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.186475] env[61852]: DEBUG oslo_vmware.api [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293394, 'name': CopyVirtualDisk_Task} progress is 0%. 
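The CopyVirtualDisk_Task above is the cache-hit path of image handling: the cached base VMDK is cloned into the instance's own directory before first boot. Sketched via the same session wrapper (session, dc_ref and the datastore paths are assumptions taken from the surrounding log):

    def copy_disk(session, dc_ref, src, dst):
        vdm = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', vdm,
                                  sourceName=src, sourceDatacenter=dc_ref,
                                  destName=dst, destDatacenter=dc_ref)
        # The "CopyVirtualDisk_Task progress is N%" lines come from this poll.
        session.wait_for_task(task)

    # e.g. copy_disk(session, dc_ref,
    #                '[datastore2] devstack-image-cache_base/.../....vmdk',
    #                '[datastore2] .../....vmdk')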
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.336124] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e07da8-ad76-4cbb-9518-3a13d9686d5b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.342597] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d686139-3367-4632-8484-c29392525083 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.371501] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-726fd425-d6f4-4307-9371-7438a8efd33c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.378592] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b7f66e0-0c2e-4669-9f32-157896e9c27e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.392377] env[61852]: DEBUG nova.compute.provider_tree [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1048.658823] env[61852]: DEBUG oslo_vmware.api [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Task: {'id': task-1293388, 'name': RelocateVM_Task, 'duration_secs': 3.580262} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.658993] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Volume attach. 
Driver type: vmdk {{(pid=61852) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1048.659816] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-277420', 'volume_id': '6525f8b8-0d76-4718-8cf5-d506fa05ddde', 'name': 'volume-6525f8b8-0d76-4718-8cf5-d506fa05ddde', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '25892f5e-147c-49b7-8009-60755a82a840', 'attached_at': '', 'detached_at': '', 'volume_id': '6525f8b8-0d76-4718-8cf5-d506fa05ddde', 'serial': '6525f8b8-0d76-4718-8cf5-d506fa05ddde'} {{(pid=61852) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1048.659954] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-578cc130-d10f-44d5-b68a-70a74ee018ee {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.674914] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-587e31b9-e938-4acf-b930-3433f6677687 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.697729] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] volume-6525f8b8-0d76-4718-8cf5-d506fa05ddde/volume-6525f8b8-0d76-4718-8cf5-d506fa05ddde.vmdk or device None with type thin {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1048.698293] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2a8102c7-8572-4630-a659-f2501aae3733 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.715304] env[61852]: DEBUG oslo_vmware.api [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293394, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.719681] env[61852]: DEBUG oslo_vmware.api [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Waiting for the task: (returnval){ [ 1048.719681] env[61852]: value = "task-1293395" [ 1048.719681] env[61852]: _type = "Task" [ 1048.719681] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.726784] env[61852]: DEBUG oslo_vmware.api [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Task: {'id': task-1293395, 'name': ReconfigVM_Task} progress is 5%. 
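Attaching the Cinder volume above ("Reconfiguring VM instance ... to attach disk ... with type thin") is a plain VM reconfigure: a VirtualDeviceConfigSpec with operation 'add' wraps a VirtualDisk whose backing file is the volume's VMDK, and ReconfigVM_Task applies it. A sketch of the spec assembly using the suds factory that oslo.vmware exposes; the controller key, unit number, and device key here are illustrative assumptions:

    def attach_vmdk(session, vm_ref, vmdk_path,
                    controller_key=1000, unit_number=1):
        cf = session.vim.client.factory
        backing = cf.create('ns0:VirtualDiskFlatVer2BackingInfo')
        backing.fileName = vmdk_path    # '[datastore2] volume-.../volume-....vmdk'
        backing.diskMode = 'persistent'
        backing.thinProvisioned = True  # matches "with type thin" in the log

        disk = cf.create('ns0:VirtualDisk')
        disk.backing = backing
        disk.controllerKey = controller_key
        disk.unitNumber = unit_number
        disk.key = -100                 # negative: vCenter assigns the real key

        change = cf.create('ns0:VirtualDeviceConfigSpec')
        change.operation = 'add'
        change.device = disk

        config = cf.create('ns0:VirtualMachineConfigSpec')
        config.deviceChange = [change]
        task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                                  vm_ref, spec=config)
        session.wait_for_task(task)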
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.895656] env[61852]: DEBUG nova.scheduler.client.report [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1049.188673] env[61852]: DEBUG oslo_vmware.api [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293394, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.229717] env[61852]: DEBUG oslo_vmware.api [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Task: {'id': task-1293395, 'name': ReconfigVM_Task, 'duration_secs': 0.318489} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.230069] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Reconfigured VM instance instance-00000066 to attach disk [datastore2] volume-6525f8b8-0d76-4718-8cf5-d506fa05ddde/volume-6525f8b8-0d76-4718-8cf5-d506fa05ddde.vmdk or device None with type thin {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1049.234715] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4e30c124-1bb3-4578-8084-db3672c28086 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.249530] env[61852]: DEBUG oslo_vmware.api [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Waiting for the task: (returnval){ [ 1049.249530] env[61852]: value = "task-1293396" [ 1049.249530] env[61852]: _type = "Task" [ 1049.249530] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.258159] env[61852]: DEBUG oslo_vmware.api [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Task: {'id': task-1293396, 'name': ReconfigVM_Task} progress is 5%. 
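The "Inventory has not changed" record above carries the numbers Placement schedules against; effective capacity for each resource class is (total - reserved) * allocation_ratio, with max_unit capping any single allocation. Plugging in the logged values as a worked check:

    def capacity(total, reserved, allocation_ratio):
        # Placement's effective capacity for one resource class.
        return int((total - reserved) * allocation_ratio)

    print(capacity(48, 0, 4.0))        # VCPU      -> 192 (max_unit 16 per instance)
    print(capacity(196590, 512, 1.0))  # MEMORY_MB -> 196078
    print(capacity(400, 0, 1.0))       # DISK_GB   -> 400 (max_unit 139 per allocation)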
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.402064] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.627s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.419068] env[61852]: INFO nova.scheduler.client.report [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Deleted allocations for instance 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca [ 1049.689376] env[61852]: DEBUG oslo_vmware.api [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293394, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.079135} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.689637] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore2] ad917577-5285-4f8d-8096-d83424deba33/ad917577-5285-4f8d-8096-d83424deba33.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1049.689855] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1049.690471] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c3b05efc-ad0d-4a54-bf6f-2b2356da0401 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.696197] env[61852]: DEBUG oslo_vmware.api [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1049.696197] env[61852]: value = "task-1293397" [ 1049.696197] env[61852]: _type = "Task" [ 1049.696197] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.703110] env[61852]: DEBUG oslo_vmware.api [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293397, 'name': ExtendVirtualDisk_Task} progress is 0%. 
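"Extending root virtual disk to 1048576" above is in KB, i.e. a 1 GiB root disk being grown right after the image copy; it is one more VirtualDiskManager task. A minimal sketch (the path and datacenter ref are assumptions):

    def extend_disk(session, dc_ref, vmdk_path, new_capacity_kb=1048576):
        vdm = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', vdm,
                                  name=vmdk_path, datacenter=dc_ref,
                                  newCapacityKb=new_capacity_kb,
                                  eagerZero=False)
        session.wait_for_task(task)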
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.761100] env[61852]: DEBUG oslo_vmware.api [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Task: {'id': task-1293396, 'name': ReconfigVM_Task, 'duration_secs': 0.121665} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.761408] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-277420', 'volume_id': '6525f8b8-0d76-4718-8cf5-d506fa05ddde', 'name': 'volume-6525f8b8-0d76-4718-8cf5-d506fa05ddde', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '25892f5e-147c-49b7-8009-60755a82a840', 'attached_at': '', 'detached_at': '', 'volume_id': '6525f8b8-0d76-4718-8cf5-d506fa05ddde', 'serial': '6525f8b8-0d76-4718-8cf5-d506fa05ddde'} {{(pid=61852) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1049.761981] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1b120e51-7d8c-4084-9e00-8035104bcf36 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.768010] env[61852]: DEBUG oslo_vmware.api [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Waiting for the task: (returnval){ [ 1049.768010] env[61852]: value = "task-1293398" [ 1049.768010] env[61852]: _type = "Task" [ 1049.768010] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.775580] env[61852]: DEBUG oslo_vmware.api [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Task: {'id': task-1293398, 'name': Rename_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.928017] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aebec1e7-eb8e-4118-8ef9-7750a8e459c4 tempest-ServerRescueTestJSONUnderV235-581014482 tempest-ServerRescueTestJSONUnderV235-581014482-project-member] Lock "7601ebe5-ff7a-4bdf-b64a-a5b2de069bca" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.319s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1050.207553] env[61852]: DEBUG oslo_vmware.api [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293397, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.165256} completed successfully.
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.207870] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1050.208680] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78df3f27-a177-4126-947a-20ed03962b46 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.232351] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] ad917577-5285-4f8d-8096-d83424deba33/ad917577-5285-4f8d-8096-d83424deba33.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1050.232671] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ab167b3-8a5f-4496-9537-e42512317f7f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.254671] env[61852]: DEBUG oslo_vmware.api [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1050.254671] env[61852]: value = "task-1293399" [ 1050.254671] env[61852]: _type = "Task" [ 1050.254671] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.262739] env[61852]: DEBUG oslo_vmware.api [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293399, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.277343] env[61852]: DEBUG oslo_vmware.api [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Task: {'id': task-1293398, 'name': Rename_Task, 'duration_secs': 0.1244} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.277652] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1050.277917] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-da95f783-a3af-42bd-8bc8-4360bce7aba0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.283844] env[61852]: DEBUG oslo_vmware.api [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Waiting for the task: (returnval){ [ 1050.283844] env[61852]: value = "task-1293400" [ 1050.283844] env[61852]: _type = "Task" [ 1050.283844] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.291407] env[61852]: DEBUG oslo_vmware.api [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Task: {'id': task-1293400, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.764959] env[61852]: DEBUG oslo_vmware.api [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293399, 'name': ReconfigVM_Task, 'duration_secs': 0.30608} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.765328] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Reconfigured VM instance instance-00000067 to attach disk [datastore2] ad917577-5285-4f8d-8096-d83424deba33/ad917577-5285-4f8d-8096-d83424deba33.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1050.765968] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-98231ec4-2125-4427-95ca-c0c0b8366a61 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.771452] env[61852]: DEBUG oslo_vmware.api [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1050.771452] env[61852]: value = "task-1293401" [ 1050.771452] env[61852]: _type = "Task" [ 1050.771452] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.779527] env[61852]: DEBUG oslo_vmware.api [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293401, 'name': Rename_Task} progress is 0%. 
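Power-on is the last step of spawn: PowerOnVM_Task is polled like every other task, and the manager then reads runtime.powerState back (the "Checking state" entries). A sketch, assuming vm_ref was resolved earlier:

    from oslo_vmware import vim_util

    def power_on(session, vm_ref):
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        session.wait_for_task(task)
        # Read back the power state, as _get_power_state does after spawn.
        return session.invoke_api(vim_util, 'get_object_property',
                                  session.vim, vm_ref, 'runtime.powerState')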
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.792184] env[61852]: DEBUG oslo_vmware.api [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Task: {'id': task-1293400, 'name': PowerOnVM_Task, 'duration_secs': 0.467067} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.792441] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1050.792650] env[61852]: INFO nova.compute.manager [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Took 8.23 seconds to spawn the instance on the hypervisor. [ 1050.792838] env[61852]: DEBUG nova.compute.manager [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1050.793613] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c09d4033-37ea-45e5-828f-f7178f1a32e5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.281784] env[61852]: DEBUG oslo_vmware.api [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293401, 'name': Rename_Task, 'duration_secs': 0.148235} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.282135] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1051.282372] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a0c3093f-6a05-48a7-9411-67a96a03b554 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.288349] env[61852]: DEBUG oslo_vmware.api [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1051.288349] env[61852]: value = "task-1293402" [ 1051.288349] env[61852]: _type = "Task" [ 1051.288349] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.295737] env[61852]: DEBUG oslo_vmware.api [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293402, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.311451] env[61852]: INFO nova.compute.manager [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Took 15.85 seconds to build instance. [ 1051.799789] env[61852]: DEBUG oslo_vmware.api [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293402, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.813436] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d344b014-8701-4b92-9ab9-0a84e22483c1 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Lock "25892f5e-147c-49b7-8009-60755a82a840" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 17.367s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1052.305054] env[61852]: DEBUG oslo_vmware.api [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293402, 'name': PowerOnVM_Task, 'duration_secs': 0.665301} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.305384] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1052.305594] env[61852]: INFO nova.compute.manager [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Took 9.30 seconds to spawn the instance on the hypervisor.
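The lock lines quoting build_and_run_instance.<locals>._locked_do_build_and_run_instance (held 17.367s for this build) show per-instance serialization: the whole build runs under a semaphore keyed by the instance UUID, so a concurrent delete or rescue of the same instance waits its turn instead of racing. The idiom, sketched with oslo.concurrency:

    from oslo_concurrency import lockutils

    def build_and_run_instance(instance):
        @lockutils.synchronized(instance.uuid)
        def _locked_do_build_and_run_instance():
            ...  # create the VM, attach volumes, power on

        _locked_do_build_and_run_instance()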
[ 1052.305778] env[61852]: DEBUG nova.compute.manager [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 1052.306582] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d7d1d57-6134-4abf-a25a-878097d130a5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1052.356903] env[61852]: DEBUG nova.compute.manager [req-d93b8bca-abb3-4cfd-af15-bef4bcd7cb14 req-55892a2b-c469-4ef8-82be-a34767afe4b4 service nova] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Received event network-changed-eda7c31e-423a-44d1-9dd3-33ec75a78df2 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1052.357142] env[61852]: DEBUG nova.compute.manager [req-d93b8bca-abb3-4cfd-af15-bef4bcd7cb14 req-55892a2b-c469-4ef8-82be-a34767afe4b4 service nova] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Refreshing instance network info cache due to event network-changed-eda7c31e-423a-44d1-9dd3-33ec75a78df2. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 1052.357438] env[61852]: DEBUG oslo_concurrency.lockutils [req-d93b8bca-abb3-4cfd-af15-bef4bcd7cb14 req-55892a2b-c469-4ef8-82be-a34767afe4b4 service nova] Acquiring lock "refresh_cache-25892f5e-147c-49b7-8009-60755a82a840" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1052.357589] env[61852]: DEBUG oslo_concurrency.lockutils [req-d93b8bca-abb3-4cfd-af15-bef4bcd7cb14 req-55892a2b-c469-4ef8-82be-a34767afe4b4 service nova] Acquired lock "refresh_cache-25892f5e-147c-49b7-8009-60755a82a840" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1052.357755] env[61852]: DEBUG nova.network.neutron [req-d93b8bca-abb3-4cfd-af15-bef4bcd7cb14 req-55892a2b-c469-4ef8-82be-a34767afe4b4 service nova] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Refreshing network info cache for port eda7c31e-423a-44d1-9dd3-33ec75a78df2 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 1052.827108] env[61852]: INFO nova.compute.manager [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Took 16.79 seconds to build instance.
[ 1053.226158] env[61852]: DEBUG nova.network.neutron [req-d93b8bca-abb3-4cfd-af15-bef4bcd7cb14 req-55892a2b-c469-4ef8-82be-a34767afe4b4 service nova] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Updated VIF entry in instance network info cache for port eda7c31e-423a-44d1-9dd3-33ec75a78df2. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 1053.226627] env[61852]: DEBUG nova.network.neutron [req-d93b8bca-abb3-4cfd-af15-bef4bcd7cb14 req-55892a2b-c469-4ef8-82be-a34767afe4b4 service nova] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Updating instance_info_cache with network_info: [{"id": "eda7c31e-423a-44d1-9dd3-33ec75a78df2", "address": "fa:16:3e:23:ed:e6", "network": {"id": "125cc6de-2911-46cf-b90c-f7b31329f0c5", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1290224231-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efc7a7bde6804937bed8a5ac6fe5b4ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60e7ee7b-4d02-4d68-af2e-5ab7d9708120", "external-id": "nsx-vlan-transportzone-550", "segmentation_id": 550, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeda7c31e-42", "ovs_interfaceid": "eda7c31e-423a-44d1-9dd3-33ec75a78df2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1053.334981] env[61852]: DEBUG oslo_concurrency.lockutils [None req-fc327318-6eac-4960-bb62-58a9aea82f0f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "ad917577-5285-4f8d-8096-d83424deba33" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.306s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1053.729255] env[61852]: DEBUG oslo_concurrency.lockutils [req-d93b8bca-abb3-4cfd-af15-bef4bcd7cb14 req-55892a2b-c469-4ef8-82be-a34767afe4b4 service nova] Releasing lock "refresh_cache-25892f5e-147c-49b7-8009-60755a82a840" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1053.921237] env[61852]: DEBUG nova.compute.manager [req-8f8132e5-ad42-4d74-82e4-c6b7234722b8 req-8edaa3c5-970b-4026-84fa-58bb51591bee service nova] [instance: ad917577-5285-4f8d-8096-d83424deba33] Received event network-changed-749e06f0-8fbc-42b0-bbf4-95d75f6733d1 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1053.921448] env[61852]: DEBUG nova.compute.manager [req-8f8132e5-ad42-4d74-82e4-c6b7234722b8 req-8edaa3c5-970b-4026-84fa-58bb51591bee service nova] [instance: ad917577-5285-4f8d-8096-d83424deba33] Refreshing instance network info cache due to event network-changed-749e06f0-8fbc-42b0-bbf4-95d75f6733d1. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 1053.921669] env[61852]: DEBUG oslo_concurrency.lockutils [req-8f8132e5-ad42-4d74-82e4-c6b7234722b8 req-8edaa3c5-970b-4026-84fa-58bb51591bee service nova] Acquiring lock "refresh_cache-ad917577-5285-4f8d-8096-d83424deba33" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1053.921819] env[61852]: DEBUG oslo_concurrency.lockutils [req-8f8132e5-ad42-4d74-82e4-c6b7234722b8 req-8edaa3c5-970b-4026-84fa-58bb51591bee service nova] Acquired lock "refresh_cache-ad917577-5285-4f8d-8096-d83424deba33" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1053.921983] env[61852]: DEBUG nova.network.neutron [req-8f8132e5-ad42-4d74-82e4-c6b7234722b8 req-8edaa3c5-970b-4026-84fa-58bb51591bee service nova] [instance: ad917577-5285-4f8d-8096-d83424deba33] Refreshing network info cache for port 749e06f0-8fbc-42b0-bbf4-95d75f6733d1 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 1054.642598] env[61852]: DEBUG nova.network.neutron [req-8f8132e5-ad42-4d74-82e4-c6b7234722b8 req-8edaa3c5-970b-4026-84fa-58bb51591bee service nova] [instance: ad917577-5285-4f8d-8096-d83424deba33] Updated VIF entry in instance network info cache for port 749e06f0-8fbc-42b0-bbf4-95d75f6733d1. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 1054.643218] env[61852]: DEBUG nova.network.neutron [req-8f8132e5-ad42-4d74-82e4-c6b7234722b8 req-8edaa3c5-970b-4026-84fa-58bb51591bee service nova] [instance: ad917577-5285-4f8d-8096-d83424deba33] Updating instance_info_cache with network_info: [{"id": "749e06f0-8fbc-42b0-bbf4-95d75f6733d1", "address": "fa:16:3e:61:5f:8e", "network": {"id": "84f117d3-1eaf-4f99-9240-7342ce499c83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1473985775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b019fd876c14428bd8f2de5fa66da4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap749e06f0-8f", "ovs_interfaceid": "749e06f0-8fbc-42b0-bbf4-95d75f6733d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1055.146762] env[61852]: DEBUG oslo_concurrency.lockutils [req-8f8132e5-ad42-4d74-82e4-c6b7234722b8 req-8edaa3c5-970b-4026-84fa-58bb51591bee service nova] Releasing lock "refresh_cache-ad917577-5285-4f8d-8096-d83424deba33" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1069.157756] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1069.158149] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Cleaning up deleted instances {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}}
[ 1069.671742] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] There are 48 instances to clean {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}}
[ 1069.672028] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 7601ebe5-ff7a-4bdf-b64a-a5b2de069bca] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1070.174980] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 9db95089-9fd7-42e5-bbf3-64847642ade6] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1070.677842] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 8d6dc967-ebe5-4573-b41a-5793f96b7eec] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1071.180770] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 33667154-991d-4a32-8f16-f292a4725e3e] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1071.684268] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 3ae6fdae-3246-4607-b15d-c320c4dc816b] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1072.188014] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 76fa1b27-bd1f-4794-a56b-88373e79db9a] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1072.691667] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 9f39bee8-52b8-426d-9b8a-114e3a6a6343] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1073.195637] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: d9715a56-249f-4c19-a55b-730d352248cb] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1073.698497] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: df332116-2ae3-4e51-99b0-108921470959] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1074.202307] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: fb75509e-3cbf-406e-ad2d-aeb51a68295d] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1074.705991] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 561d33d0-cad5-48ae-bd32-5de2220c5283] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1075.209370] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: ba863c60-444a-4959-8f8f-87b4952d2872] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1075.714059] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: d58958f2-7b6f-4480-9710-aa9e67ebd37c] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1076.216524] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: b99bacc1-21e7-4bbd-8092-549246500421] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1076.719676] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 8bdb8059-3fb5-4f9c-bc73-b85bf8a23075] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1077.223525] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: cc5e0467-2960-43a1-bd7b-a528d5788028] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1077.726995] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: e97448d7-0162-44bf-95d1-93bdcbcaec25] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1078.230315] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 4623565b-cd36-498c-a0e9-c3b1c6ef479b] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1078.733971] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 51ecc9c3-a3fc-4bd7-8c90-003451700212] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1079.237665] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 6cb1968c-b951-4a83-a036-ba50b735133c] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1079.741577] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 4fb68588-21a8-4004-9bbc-aa1655624bcb] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1080.245186] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: b44c9cc0-3f2b-495a-87ee-f03de8dcec3c] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1080.748869] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 12e431d3-4c23-4f4c-a619-f0b69a0e31e8] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1081.252345] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: eae1ad1f-f213-4227-93aa-b0ccf660e638] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1081.755651] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 4b85f2d7-d99a-4332-a78c-3f2a50c7cb92] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1082.259097] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 23ff3009-7b13-4d5e-93ed-ca1c3e9127bb] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1082.762674] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 254919cb-e3cd-4288-8696-95e632d78a38] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1083.265999] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 8d8679db-eb9d-45c1-b053-70378f58e273] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1083.769757] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 21d74604-6a64-44ee-a012-ebff7166853e] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1084.075169] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d57f740a-022a-4336-925e-bcf04d1c9d04 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquiring lock "d8baa4c3-7da1-450c-8bef-336fbb34ceab" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1084.075462] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d57f740a-022a-4336-925e-bcf04d1c9d04 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d8baa4c3-7da1-450c-8bef-336fbb34ceab" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1084.272642] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: aeaa2828-6d83-4b26-bd1c-5f654c70713f] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1084.578809] env[61852]: DEBUG nova.compute.utils [None req-d57f740a-022a-4336-925e-bcf04d1c9d04 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}}
[ 1084.775543] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: f48b40ab-23f2-4071-8168-e7e2411ad64d] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1085.081803] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d57f740a-022a-4336-925e-bcf04d1c9d04 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d8baa4c3-7da1-450c-8bef-336fbb34ceab" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1085.279587] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 988c0a5c-b84d-44cf-9068-defd7132b0c9] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1085.782893] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: d93b8055-1eb2-4368-a051-289dc5a9d0ed] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1086.146471] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d57f740a-022a-4336-925e-bcf04d1c9d04 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquiring lock "d8baa4c3-7da1-450c-8bef-336fbb34ceab" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1086.146744] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d57f740a-022a-4336-925e-bcf04d1c9d04 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d8baa4c3-7da1-450c-8bef-336fbb34ceab" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1086.147120] env[61852]: INFO nova.compute.manager [None req-d57f740a-022a-4336-925e-bcf04d1c9d04 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Attaching volume bbeb7c67-805f-43f5-a6d8-ef1012fa19e6 to /dev/sdb
[ 1086.178614] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc93e535-cd3c-4869-95d5-50223667a4c3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1086.185186] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4b6e2c1-df62-41c4-a5e2-f0d88fff55ed {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1086.198084] env[61852]: DEBUG nova.virt.block_device [None req-d57f740a-022a-4336-925e-bcf04d1c9d04 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Updating existing volume attachment record: c6660795-c8a5-42f6-a308-ab27d4a2fc46 {{(pid=61852) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}}
[ 1086.286011] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: b0f8f7dd-e559-43be-b541-c3da48a07d68] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1086.789629] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 883a0d5a-f775-4ffc-abf0-921d0ea6cc8c] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1087.293621] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: f8ebb1b7-39c6-486e-ab25-23080d858846] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1087.797415] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 89970cff-cb49-4803-81a5-1675b0ea4aaf] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1088.111564] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Acquiring lock "25892f5e-147c-49b7-8009-60755a82a840" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1088.111850] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Lock "25892f5e-147c-49b7-8009-60755a82a840" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1088.112080] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Acquiring lock "25892f5e-147c-49b7-8009-60755a82a840-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1088.112274] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Lock "25892f5e-147c-49b7-8009-60755a82a840-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1088.112448] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Lock "25892f5e-147c-49b7-8009-60755a82a840-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1088.115653] env[61852]: INFO nova.compute.manager [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Terminating instance
[ 1088.117571] env[61852]: DEBUG nova.compute.manager [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 1088.117796] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 1088.118053] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2314cd60-3aae-44db-8c7f-a23035621707 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1088.126798] env[61852]: DEBUG oslo_vmware.api [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Waiting for the task: (returnval){
[ 1088.126798] env[61852]: value = "task-1293406"
[ 1088.126798] env[61852]: _type = "Task"
[ 1088.126798] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1088.134716] env[61852]: DEBUG oslo_vmware.api [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Task: {'id': task-1293406, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1088.300798] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 0ec1210f-7d42-4b71-abdc-9f818ffb91ea] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1088.636471] env[61852]: DEBUG oslo_vmware.api [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Task: {'id': task-1293406, 'name': PowerOffVM_Task, 'duration_secs': 0.168715} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1088.636734] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 1088.636934] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Volume detach. Driver type: vmdk {{(pid=61852) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}}
[ 1088.637155] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-277420', 'volume_id': '6525f8b8-0d76-4718-8cf5-d506fa05ddde', 'name': 'volume-6525f8b8-0d76-4718-8cf5-d506fa05ddde', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '25892f5e-147c-49b7-8009-60755a82a840', 'attached_at': '', 'detached_at': '', 'volume_id': '6525f8b8-0d76-4718-8cf5-d506fa05ddde', 'serial': '6525f8b8-0d76-4718-8cf5-d506fa05ddde'} {{(pid=61852) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}}
[ 1088.637902] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf8115ae-acc9-41e1-83dd-33d00ed0ef6a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1088.655607] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f764240-a5a9-4d81-8719-83dac2e737d9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1088.661681] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ee56068-8b82-451e-90c3-673208fb7da2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1088.678741] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e305d1e2-d466-4636-8c69-f7836799ed49 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1088.693075] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] The volume has not been displaced from its original location: [datastore2] volume-6525f8b8-0d76-4718-8cf5-d506fa05ddde/volume-6525f8b8-0d76-4718-8cf5-d506fa05ddde.vmdk. No consolidation needed. {{(pid=61852) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}}
[ 1088.698258] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Reconfiguring VM instance instance-00000066 to detach disk 2000 {{(pid=61852) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}}
[ 1088.698541] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ee974ec-9b2a-4cc9-833b-17c4aaba7399 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1088.714899] env[61852]: DEBUG oslo_vmware.api [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Waiting for the task: (returnval){
[ 1088.714899] env[61852]: value = "task-1293408"
[ 1088.714899] env[61852]: _type = "Task"
[ 1088.714899] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1088.724109] env[61852]: DEBUG oslo_vmware.api [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Task: {'id': task-1293408, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1088.803874] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: e265a4be-7b37-40b5-a199-42a7cd945f66] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1089.225866] env[61852]: DEBUG oslo_vmware.api [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Task: {'id': task-1293408, 'name': ReconfigVM_Task, 'duration_secs': 0.154445} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1089.226134] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Reconfigured VM instance instance-00000066 to detach disk 2000 {{(pid=61852) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}}
[ 1089.230692] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff62e85d-57f5-4f29-b59b-750b96f9b0d5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1089.244644] env[61852]: DEBUG oslo_vmware.api [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Waiting for the task: (returnval){
[ 1089.244644] env[61852]: value = "task-1293409"
[ 1089.244644] env[61852]: _type = "Task"
[ 1089.244644] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1089.252071] env[61852]: DEBUG oslo_vmware.api [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Task: {'id': task-1293409, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1089.307798] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 8d733f93-7636-447b-a5d5-53c16c30061f] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1089.754060] env[61852]: DEBUG oslo_vmware.api [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Task: {'id': task-1293409, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1089.810713] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: b0d38886-aacb-4b7e-9530-c5891d9cee66] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1090.255331] env[61852]: DEBUG oslo_vmware.api [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Task: {'id': task-1293409, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1090.314015] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 8897a654-6805-45b0-b12b-16f7981d33ad] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1090.740554] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-d57f740a-022a-4336-925e-bcf04d1c9d04 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Volume attach. Driver type: vmdk {{(pid=61852) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}}
[ 1090.740836] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-d57f740a-022a-4336-925e-bcf04d1c9d04 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-277433', 'volume_id': 'bbeb7c67-805f-43f5-a6d8-ef1012fa19e6', 'name': 'volume-bbeb7c67-805f-43f5-a6d8-ef1012fa19e6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd8baa4c3-7da1-450c-8bef-336fbb34ceab', 'attached_at': '', 'detached_at': '', 'volume_id': 'bbeb7c67-805f-43f5-a6d8-ef1012fa19e6', 'serial': 'bbeb7c67-805f-43f5-a6d8-ef1012fa19e6'} {{(pid=61852) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}}
[ 1090.741732] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae2e8b5d-cda5-46d7-8de5-af762acefdea {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1090.760568] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ed17dc-16c4-4bd0-94b4-3fe28012bf43 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1090.767614] env[61852]: DEBUG oslo_vmware.api [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Task: {'id': task-1293409, 'name': ReconfigVM_Task, 'duration_secs': 1.163581} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1090.780269] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-277420', 'volume_id': '6525f8b8-0d76-4718-8cf5-d506fa05ddde', 'name': 'volume-6525f8b8-0d76-4718-8cf5-d506fa05ddde', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '25892f5e-147c-49b7-8009-60755a82a840', 'attached_at': '', 'detached_at': '', 'volume_id': '6525f8b8-0d76-4718-8cf5-d506fa05ddde', 'serial': '6525f8b8-0d76-4718-8cf5-d506fa05ddde'} {{(pid=61852) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}}
[ 1090.780530] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 1090.787999] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-d57f740a-022a-4336-925e-bcf04d1c9d04 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] volume-bbeb7c67-805f-43f5-a6d8-ef1012fa19e6/volume-bbeb7c67-805f-43f5-a6d8-ef1012fa19e6.vmdk or device None with type thin {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 1090.788923] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2603a63-3b72-49bc-a1ec-52bca75df7c1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1090.791423] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-171282f1-2ef5-4cb1-8d64-ff1ebe7569fc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1090.808856] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 1090.809929] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6ffce1e0-cacf-48ef-8605-f46a5f280ba4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1090.811310] env[61852]: DEBUG oslo_vmware.api [None req-d57f740a-022a-4336-925e-bcf04d1c9d04 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){
[ 1090.811310] env[61852]: value = "task-1293410"
[ 1090.811310] env[61852]: _type = "Task"
[ 1090.811310] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1090.820154] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: f18906e9-67b3-4537-9169-9d275e2ec4e4] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1090.821884] env[61852]: DEBUG oslo_vmware.api [None req-d57f740a-022a-4336-925e-bcf04d1c9d04 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293410, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1090.875268] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 1090.875519] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Deleting contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 1090.875661] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Deleting the datastore file [datastore2] 25892f5e-147c-49b7-8009-60755a82a840 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1090.875930] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4870e972-d9ac-478d-baa5-ba6cdbbf4b70 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1090.882746] env[61852]: DEBUG oslo_vmware.api [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Waiting for the task: (returnval){
[ 1090.882746] env[61852]: value = "task-1293412"
[ 1090.882746] env[61852]: _type = "Task"
[ 1090.882746] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1090.891685] env[61852]: DEBUG oslo_vmware.api [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Task: {'id': task-1293412, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1091.320881] env[61852]: DEBUG oslo_vmware.api [None req-d57f740a-022a-4336-925e-bcf04d1c9d04 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293410, 'name': ReconfigVM_Task, 'duration_secs': 0.318522} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1091.321229] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-d57f740a-022a-4336-925e-bcf04d1c9d04 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Reconfigured VM instance instance-00000065 to attach disk [datastore1] volume-bbeb7c67-805f-43f5-a6d8-ef1012fa19e6/volume-bbeb7c67-805f-43f5-a6d8-ef1012fa19e6.vmdk or device None with type thin {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 1091.325944] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: c94066d5-2e5f-4059-bdc5-385d517f1d84] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1091.327687] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a893519-935e-4f43-b331-ce5c03c4ef4f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1091.342493] env[61852]: DEBUG oslo_vmware.api [None req-d57f740a-022a-4336-925e-bcf04d1c9d04 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){
[ 1091.342493] env[61852]: value = "task-1293413"
[ 1091.342493] env[61852]: _type = "Task"
[ 1091.342493] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1091.350178] env[61852]: DEBUG oslo_vmware.api [None req-d57f740a-022a-4336-925e-bcf04d1c9d04 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293413, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1091.392365] env[61852]: DEBUG oslo_vmware.api [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Task: {'id': task-1293412, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076406} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1091.392618] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1091.392802] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Deleted contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 1091.392980] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1091.393170] env[61852]: INFO nova.compute.manager [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Took 3.28 seconds to destroy the instance on the hypervisor.
[ 1091.393431] env[61852]: DEBUG oslo.service.loopingcall [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1091.393630] env[61852]: DEBUG nova.compute.manager [-] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 1091.393724] env[61852]: DEBUG nova.network.neutron [-] [instance: 25892f5e-147c-49b7-8009-60755a82a840] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1091.666675] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b56739b6-9a73-4d71-9fc1-0954a986e577 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "ad917577-5285-4f8d-8096-d83424deba33" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1091.667239] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b56739b6-9a73-4d71-9fc1-0954a986e577 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "ad917577-5285-4f8d-8096-d83424deba33" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1091.839739] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 46ccab1f-b7af-49df-a38d-af1fa3bac486] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1091.854944] env[61852]: DEBUG oslo_vmware.api [None req-d57f740a-022a-4336-925e-bcf04d1c9d04 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293413, 'name': ReconfigVM_Task, 'duration_secs': 0.126972} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1091.855334] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-d57f740a-022a-4336-925e-bcf04d1c9d04 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-277433', 'volume_id': 'bbeb7c67-805f-43f5-a6d8-ef1012fa19e6', 'name': 'volume-bbeb7c67-805f-43f5-a6d8-ef1012fa19e6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd8baa4c3-7da1-450c-8bef-336fbb34ceab', 'attached_at': '', 'detached_at': '', 'volume_id': 'bbeb7c67-805f-43f5-a6d8-ef1012fa19e6', 'serial': 'bbeb7c67-805f-43f5-a6d8-ef1012fa19e6'} {{(pid=61852) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}}
[ 1091.860969] env[61852]: DEBUG nova.compute.manager [req-5bb91ef8-bf34-49e1-be10-9cdf31b15e1c req-616d2608-f375-4704-b101-bef9adf931bd service nova] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Received event network-vif-deleted-eda7c31e-423a-44d1-9dd3-33ec75a78df2 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1091.860969] env[61852]: INFO nova.compute.manager [req-5bb91ef8-bf34-49e1-be10-9cdf31b15e1c req-616d2608-f375-4704-b101-bef9adf931bd service nova] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Neutron deleted interface eda7c31e-423a-44d1-9dd3-33ec75a78df2; detaching it from the instance and deleting it from the info cache
[ 1091.861536] env[61852]: DEBUG nova.network.neutron [req-5bb91ef8-bf34-49e1-be10-9cdf31b15e1c req-616d2608-f375-4704-b101-bef9adf931bd service nova] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1092.170775] env[61852]: DEBUG nova.compute.utils [None req-b56739b6-9a73-4d71-9fc1-0954a986e577 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}}
[ 1092.332635] env[61852]: DEBUG nova.network.neutron [-] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1092.343245] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: cb50d964-5c0e-4cf3-b652-0f7b7a488f91] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1092.363240] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f8ea9273-1775-441a-b64d-653742d469bb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1092.373217] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc465a9a-60ee-4d71-902b-28f86a63abc9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1092.399359] env[61852]: DEBUG nova.compute.manager [req-5bb91ef8-bf34-49e1-be10-9cdf31b15e1c req-616d2608-f375-4704-b101-bef9adf931bd service nova] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Detach interface failed, port_id=eda7c31e-423a-44d1-9dd3-33ec75a78df2, reason: Instance 25892f5e-147c-49b7-8009-60755a82a840 could not be found. {{(pid=61852) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}}
[ 1092.673388] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b56739b6-9a73-4d71-9fc1-0954a986e577 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "ad917577-5285-4f8d-8096-d83424deba33" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1092.835057] env[61852]: INFO nova.compute.manager [-] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Took 1.44 seconds to deallocate network for instance.
[ 1092.845855] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: d3922357-383f-4f7e-9c76-4eb688a092b9] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1092.901087] env[61852]: DEBUG nova.objects.instance [None req-d57f740a-022a-4336-925e-bcf04d1c9d04 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lazy-loading 'flavor' on Instance uuid d8baa4c3-7da1-450c-8bef-336fbb34ceab {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1093.349458] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: d48cefda-0b05-4ec0-8c1d-bc25cd491faf] Instance has had 0 of 5 cleanup attempts {{(pid=61852) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1093.382472] env[61852]: INFO nova.compute.manager [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Took 0.55 seconds to detach 1 volumes for instance.
[ 1093.385506] env[61852]: DEBUG nova.compute.manager [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] [instance: 25892f5e-147c-49b7-8009-60755a82a840] Deleting volume: 6525f8b8-0d76-4718-8cf5-d506fa05ddde {{(pid=61852) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3247}}
[ 1093.406367] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d57f740a-022a-4336-925e-bcf04d1c9d04 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d8baa4c3-7da1-450c-8bef-336fbb34ceab" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.260s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1093.470212] env[61852]: DEBUG oslo_concurrency.lockutils [None req-448d10c0-78fc-496d-9182-ec6c4662e647 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquiring lock "d8baa4c3-7da1-450c-8bef-336fbb34ceab" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1093.470555] env[61852]: DEBUG oslo_concurrency.lockutils [None req-448d10c0-78fc-496d-9182-ec6c4662e647 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d8baa4c3-7da1-450c-8bef-336fbb34ceab" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1093.470757] env[61852]: DEBUG nova.compute.manager [None req-448d10c0-78fc-496d-9182-ec6c4662e647 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 1093.471667] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c1bde05-aeef-4289-81bd-8e0775c3ac82 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1093.478291] env[61852]: DEBUG nova.compute.manager [None req-448d10c0-78fc-496d-9182-ec6c4662e647 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61852) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}}
[ 1093.478815] env[61852]: DEBUG nova.objects.instance [None req-448d10c0-78fc-496d-9182-ec6c4662e647 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lazy-loading 'flavor' on Instance uuid d8baa4c3-7da1-450c-8bef-336fbb34ceab {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1093.728282] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b56739b6-9a73-4d71-9fc1-0954a986e577 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "ad917577-5285-4f8d-8096-d83424deba33" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1093.728565] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b56739b6-9a73-4d71-9fc1-0954a986e577 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "ad917577-5285-4f8d-8096-d83424deba33" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1093.728813] env[61852]: INFO nova.compute.manager [None req-b56739b6-9a73-4d71-9fc1-0954a986e577 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Attaching volume 450a2b14-09d5-4fed-8158-d8a1c6d4cdd1 to /dev/sdb
[ 1093.758531] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e52f566e-42ca-4ee6-99d5-cf4c52a0a29b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1093.765678] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5453727d-a388-4a34-b8cd-ec6808f1e16d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1093.778049] env[61852]: DEBUG nova.virt.block_device [None req-b56739b6-9a73-4d71-9fc1-0954a986e577 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Updating existing volume attachment record: 4e8202ae-03c8-4dea-b6e6-a7053540693e {{(pid=61852) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}}
[ 1093.852651] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1093.852833] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Cleaning up deleted instances with incomplete migration {{(pid=61852) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}}
[ 1093.922608] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1093.922941] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1093.923163] env[61852]: DEBUG nova.objects.instance [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Lazy-loading 'resources' on Instance uuid 25892f5e-147c-49b7-8009-60755a82a840 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1093.985170] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-448d10c0-78fc-496d-9182-ec6c4662e647 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 1093.985441] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-32fa5da3-c40a-44fe-80bf-67e23aa5a83c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1093.991683] env[61852]: DEBUG oslo_vmware.api [None req-448d10c0-78fc-496d-9182-ec6c4662e647 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){
[ 1093.991683] env[61852]: value = "task-1293416"
[ 1093.991683] env[61852]: _type = "Task"
[ 1093.991683] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1094.000346] env[61852]: DEBUG oslo_vmware.api [None req-448d10c0-78fc-496d-9182-ec6c4662e647 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293416, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1094.355992] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1094.501967] env[61852]: DEBUG oslo_vmware.api [None req-448d10c0-78fc-496d-9182-ec6c4662e647 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293416, 'name': PowerOffVM_Task, 'duration_secs': 0.209044} completed successfully.
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.502265] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-448d10c0-78fc-496d-9182-ec6c4662e647 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1094.502447] env[61852]: DEBUG nova.compute.manager [None req-448d10c0-78fc-496d-9182-ec6c4662e647 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1094.503274] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4918dabc-a1d4-4e72-8701-a110706bc464 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.647428] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ecb0007-6fae-49db-8361-671561dc4590 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.654900] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f80cd9f-70c6-46dc-a3e6-d63daa8986ef {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.683702] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9822ef7-7fe9-4c6b-85b8-662ad42b122f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.690367] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dde1837b-2b28-4c44-bd3a-d86873a0c1d5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.702741] env[61852]: DEBUG nova.compute.provider_tree [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1095.016825] env[61852]: DEBUG oslo_concurrency.lockutils [None req-448d10c0-78fc-496d-9182-ec6c4662e647 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d8baa4c3-7da1-450c-8bef-336fbb34ceab" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.546s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.205412] env[61852]: DEBUG nova.scheduler.client.report [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1095.710730] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.788s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.733516] env[61852]: INFO nova.scheduler.client.report [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Deleted allocations for instance 25892f5e-147c-49b7-8009-60755a82a840 [ 1095.859308] env[61852]: DEBUG nova.objects.instance [None req-db564549-a2b7-49e3-8d7c-17960a945bd2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lazy-loading 'flavor' on Instance uuid d8baa4c3-7da1-450c-8bef-336fbb34ceab {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1096.241427] env[61852]: DEBUG oslo_concurrency.lockutils [None req-6968414e-22ab-480d-bfc7-b81a53893c26 tempest-ServersTestBootFromVolume-1227508965 tempest-ServersTestBootFromVolume-1227508965-project-member] Lock "25892f5e-147c-49b7-8009-60755a82a840" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.129s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.364045] env[61852]: DEBUG oslo_concurrency.lockutils [None req-db564549-a2b7-49e3-8d7c-17960a945bd2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquiring lock "refresh_cache-d8baa4c3-7da1-450c-8bef-336fbb34ceab" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1096.364244] env[61852]: DEBUG oslo_concurrency.lockutils [None req-db564549-a2b7-49e3-8d7c-17960a945bd2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquired lock "refresh_cache-d8baa4c3-7da1-450c-8bef-336fbb34ceab" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.364433] env[61852]: DEBUG nova.network.neutron [None req-db564549-a2b7-49e3-8d7c-17960a945bd2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1096.364612] env[61852]: DEBUG nova.objects.instance [None req-db564549-a2b7-49e3-8d7c-17960a945bd2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lazy-loading 'info_cache' on Instance uuid d8baa4c3-7da1-450c-8bef-336fbb34ceab {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1096.867695] env[61852]: DEBUG nova.objects.base [None req-db564549-a2b7-49e3-8d7c-17960a945bd2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=61852) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1097.638183] 
env[61852]: DEBUG nova.network.neutron [None req-db564549-a2b7-49e3-8d7c-17960a945bd2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Updating instance_info_cache with network_info: [{"id": "537951d4-2e0a-45fd-a9eb-39ddf930b39d", "address": "fa:16:3e:bd:ee:7f", "network": {"id": "07c444d7-03d4-406b-bb66-de44a92b43d6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-818710190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a10be4b0f16c432c87b39b211fbf2fee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0c293d47-74c0-49d7-a474-cdb643080f6f", "external-id": "nsx-vlan-transportzone-172", "segmentation_id": 172, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap537951d4-2e", "ovs_interfaceid": "537951d4-2e0a-45fd-a9eb-39ddf930b39d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.140794] env[61852]: DEBUG oslo_concurrency.lockutils [None req-db564549-a2b7-49e3-8d7c-17960a945bd2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Releasing lock "refresh_cache-d8baa4c3-7da1-450c-8bef-336fbb34ceab" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1098.322515] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-b56739b6-9a73-4d71-9fc1-0954a986e577 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Volume attach. 
Driver type: vmdk {{(pid=61852) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1098.322775] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-b56739b6-9a73-4d71-9fc1-0954a986e577 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-277435', 'volume_id': '450a2b14-09d5-4fed-8158-d8a1c6d4cdd1', 'name': 'volume-450a2b14-09d5-4fed-8158-d8a1c6d4cdd1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ad917577-5285-4f8d-8096-d83424deba33', 'attached_at': '', 'detached_at': '', 'volume_id': '450a2b14-09d5-4fed-8158-d8a1c6d4cdd1', 'serial': '450a2b14-09d5-4fed-8158-d8a1c6d4cdd1'} {{(pid=61852) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1098.323699] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa49a3bb-535e-4b4f-9c12-403b9043553d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.339912] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d23f6ce-f897-471e-b6ec-d54bff6801cf {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.365552] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-b56739b6-9a73-4d71-9fc1-0954a986e577 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] volume-450a2b14-09d5-4fed-8158-d8a1c6d4cdd1/volume-450a2b14-09d5-4fed-8158-d8a1c6d4cdd1.vmdk or device None with type thin {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1098.365824] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee6fb732-aae3-4b69-bff3-9e64b1f28f8b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.384158] env[61852]: DEBUG oslo_vmware.api [None req-b56739b6-9a73-4d71-9fc1-0954a986e577 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1098.384158] env[61852]: value = "task-1293420" [ 1098.384158] env[61852]: _type = "Task" [ 1098.384158] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.392090] env[61852]: DEBUG oslo_vmware.api [None req-b56739b6-9a73-4d71-9fc1-0954a986e577 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293420, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.644725] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-db564549-a2b7-49e3-8d7c-17960a945bd2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1098.645115] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a1d5d891-1773-4f45-87e3-0fc36d8e5f44 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.653149] env[61852]: DEBUG oslo_vmware.api [None req-db564549-a2b7-49e3-8d7c-17960a945bd2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1098.653149] env[61852]: value = "task-1293421" [ 1098.653149] env[61852]: _type = "Task" [ 1098.653149] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.662333] env[61852]: DEBUG oslo_vmware.api [None req-db564549-a2b7-49e3-8d7c-17960a945bd2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293421, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.856860] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.857242] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.894611] env[61852]: DEBUG oslo_vmware.api [None req-b56739b6-9a73-4d71-9fc1-0954a986e577 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293420, 'name': ReconfigVM_Task, 'duration_secs': 0.435097} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.894917] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-b56739b6-9a73-4d71-9fc1-0954a986e577 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Reconfigured VM instance instance-00000067 to attach disk [datastore1] volume-450a2b14-09d5-4fed-8158-d8a1c6d4cdd1/volume-450a2b14-09d5-4fed-8158-d8a1c6d4cdd1.vmdk or device None with type thin {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1098.900300] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5f37a108-decb-4ed7-8db2-be35c77a3f2b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.915365] env[61852]: DEBUG oslo_vmware.api [None req-b56739b6-9a73-4d71-9fc1-0954a986e577 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1098.915365] env[61852]: value = "task-1293422" [ 1098.915365] env[61852]: _type = "Task" [ 1098.915365] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.923491] env[61852]: DEBUG oslo_vmware.api [None req-b56739b6-9a73-4d71-9fc1-0954a986e577 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293422, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.162945] env[61852]: DEBUG oslo_vmware.api [None req-db564549-a2b7-49e3-8d7c-17960a945bd2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293421, 'name': PowerOnVM_Task, 'duration_secs': 0.413962} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.163420] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-db564549-a2b7-49e3-8d7c-17960a945bd2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1099.163420] env[61852]: DEBUG nova.compute.manager [None req-db564549-a2b7-49e3-8d7c-17960a945bd2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1099.164631] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f418c439-bd24-4209-b905-5f1edf0fbe11 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.235965] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Acquiring lock "20eb193b-7104-4d3d-977d-577d3f048b7d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1099.236227] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Lock "20eb193b-7104-4d3d-977d-577d3f048b7d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1099.363060] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1099.363060] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Starting heal instance info cache {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1099.363060] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Rebuilding the list of instances to heal {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1099.425203] env[61852]: DEBUG oslo_vmware.api [None req-b56739b6-9a73-4d71-9fc1-0954a986e577 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293422, 'name': ReconfigVM_Task, 'duration_secs': 0.168299} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.425519] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-b56739b6-9a73-4d71-9fc1-0954a986e577 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-277435', 'volume_id': '450a2b14-09d5-4fed-8158-d8a1c6d4cdd1', 'name': 'volume-450a2b14-09d5-4fed-8158-d8a1c6d4cdd1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ad917577-5285-4f8d-8096-d83424deba33', 'attached_at': '', 'detached_at': '', 'volume_id': '450a2b14-09d5-4fed-8158-d8a1c6d4cdd1', 'serial': '450a2b14-09d5-4fed-8158-d8a1c6d4cdd1'} {{(pid=61852) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1099.738389] env[61852]: DEBUG nova.compute.manager [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1099.900140] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "refresh_cache-d8baa4c3-7da1-450c-8bef-336fbb34ceab" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1099.900303] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquired lock "refresh_cache-d8baa4c3-7da1-450c-8bef-336fbb34ceab" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.900451] env[61852]: DEBUG nova.network.neutron [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Forcefully refreshing network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1099.900604] env[61852]: DEBUG nova.objects.instance [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lazy-loading 'info_cache' on Instance uuid d8baa4c3-7da1-450c-8bef-336fbb34ceab {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1100.261466] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1100.261731] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1100.263165] env[61852]: INFO nova.compute.claims [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Claim successful 
on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1100.461673] env[61852]: DEBUG nova.objects.instance [None req-b56739b6-9a73-4d71-9fc1-0954a986e577 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lazy-loading 'flavor' on Instance uuid ad917577-5285-4f8d-8096-d83424deba33 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1100.966825] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b56739b6-9a73-4d71-9fc1-0954a986e577 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "ad917577-5285-4f8d-8096-d83424deba33" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.238s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1101.324505] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06901a8b-14f2-4ee6-851d-73eb494e9d7a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.333245] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-256b95f9-e0cc-414d-add3-01e14671f0fc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.365017] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d286c44-3039-4303-ac3d-c7a7d36ea2a3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.372224] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83881900-8dae-4ac8-a54d-150e89351219 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.385379] env[61852]: DEBUG nova.compute.provider_tree [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1101.634399] env[61852]: DEBUG nova.network.neutron [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Updating instance_info_cache with network_info: [{"id": "537951d4-2e0a-45fd-a9eb-39ddf930b39d", "address": "fa:16:3e:bd:ee:7f", "network": {"id": "07c444d7-03d4-406b-bb66-de44a92b43d6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-818710190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a10be4b0f16c432c87b39b211fbf2fee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0c293d47-74c0-49d7-a474-cdb643080f6f", "external-id": "nsx-vlan-transportzone-172", "segmentation_id": 172, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap537951d4-2e", "ovs_interfaceid": "537951d4-2e0a-45fd-a9eb-39ddf930b39d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1101.888462] env[61852]: DEBUG nova.scheduler.client.report [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1102.126355] env[61852]: DEBUG oslo_concurrency.lockutils [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "645184de-66ce-4b79-a7e3-84e0a0dfe330" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1102.126602] env[61852]: DEBUG oslo_concurrency.lockutils [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "645184de-66ce-4b79-a7e3-84e0a0dfe330" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1102.136681] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Releasing lock "refresh_cache-d8baa4c3-7da1-450c-8bef-336fbb34ceab" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1102.136863] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Updated the network info_cache for instance {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1102.137074] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.137274] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.137676] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 
1102.137871] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.138050] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.138210] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.138343] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61852) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1102.138501] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.392898] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.131s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1102.393466] env[61852]: DEBUG nova.compute.manager [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1102.628613] env[61852]: DEBUG nova.compute.manager [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1102.641504] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1102.641665] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1102.641828] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1102.642022] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61852) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1102.643232] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ea43b8-7886-4f9f-bbcd-4c99ae6f26cc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.651228] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75a044ff-8a4e-4aa9-b54e-2f1eb3c852be {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.665422] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4640ab13-3e28-4624-be19-18c0fa11a3a7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.672186] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd879d34-0216-4709-86af-36329cbe749a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.702568] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181099MB free_disk=139GB free_vcpus=48 pci_devices=None {{(pid=61852) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1102.702737] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1102.702896] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61852) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1102.898565] env[61852]: DEBUG nova.compute.utils [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1102.899884] env[61852]: DEBUG nova.compute.manager [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Not allocating networking since 'none' was specified. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1103.151282] env[61852]: DEBUG oslo_concurrency.lockutils [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1103.401426] env[61852]: DEBUG nova.compute.manager [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1103.723496] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance d8baa4c3-7da1-450c-8bef-336fbb34ceab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1103.723650] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance ad917577-5285-4f8d-8096-d83424deba33 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1103.723776] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 20eb193b-7104-4d3d-977d-577d3f048b7d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1104.226253] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 645184de-66ce-4b79-a7e3-84e0a0dfe330 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1104.226495] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=61852) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1104.226649] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=61852) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1104.281974] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1746747-d8ed-457b-a20c-4e17da9aa126 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.289456] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83a453d3-f045-40c6-afde-2a642f31b874 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.318840] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6be02b9-9184-477c-ae5d-8485f8c782d7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.325303] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5f762c5-386c-4df7-b563-7cc240fa8a69 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.337617] env[61852]: DEBUG nova.compute.provider_tree [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1104.409877] env[61852]: DEBUG nova.compute.manager [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Start spawning the instance on the hypervisor. 
{{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1104.436880] env[61852]: DEBUG nova.virt.hardware [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1104.437113] env[61852]: DEBUG nova.virt.hardware [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1104.437306] env[61852]: DEBUG nova.virt.hardware [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1104.437519] env[61852]: DEBUG nova.virt.hardware [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1104.437674] env[61852]: DEBUG nova.virt.hardware [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1104.437822] env[61852]: DEBUG nova.virt.hardware [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1104.438044] env[61852]: DEBUG nova.virt.hardware [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1104.438227] env[61852]: DEBUG nova.virt.hardware [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1104.438435] env[61852]: DEBUG nova.virt.hardware [None 
req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1104.438610] env[61852]: DEBUG nova.virt.hardware [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1104.438786] env[61852]: DEBUG nova.virt.hardware [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1104.439616] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df117c20-9b89-4dce-8748-197922fbb496 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.447253] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e374a4a-fe7a-4dbe-bca1-743b552ae15d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.459869] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Instance VIF info [] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1104.465288] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Creating folder: Project (a139b0a946db4ead944cc594ccf07544). Parent ref: group-v277280. {{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1104.465537] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-db2c1eb1-a3f2-4603-abb8-afd008f874e9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.476880] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Created folder: Project (a139b0a946db4ead944cc594ccf07544) in parent group-v277280. [ 1104.477075] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Creating folder: Instances. Parent ref: group-v277436. 
{{(pid=61852) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1104.477313] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bb91dabf-ce11-4c42-8f68-7a8b67499f0f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.485662] env[61852]: INFO nova.virt.vmwareapi.vm_util [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Created folder: Instances in parent group-v277436. [ 1104.485887] env[61852]: DEBUG oslo.service.loopingcall [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1104.486078] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1104.486270] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8fac4232-2c6e-4648-87ea-50318aa9d51f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.501449] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1104.501449] env[61852]: value = "task-1293425" [ 1104.501449] env[61852]: _type = "Task" [ 1104.501449] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.507961] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293425, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.840304] env[61852]: DEBUG nova.scheduler.client.report [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1105.011560] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293425, 'name': CreateVM_Task, 'duration_secs': 0.264281} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.011745] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1105.012183] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1105.012349] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.012692] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1105.012949] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5a5c1df-ed5f-4ae7-b132-6c99eaac0b61 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.017286] env[61852]: DEBUG oslo_vmware.api [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Waiting for the task: (returnval){ [ 1105.017286] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52e08ff8-698c-7e46-cf2a-6a44566aa637" [ 1105.017286] env[61852]: _type = "Task" [ 1105.017286] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.024458] env[61852]: DEBUG oslo_vmware.api [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52e08ff8-698c-7e46-cf2a-6a44566aa637, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.345394] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61852) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1105.345572] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.643s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.345854] env[61852]: DEBUG oslo_concurrency.lockutils [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.195s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1105.347532] env[61852]: INFO nova.compute.claims [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1105.527195] env[61852]: DEBUG oslo_vmware.api [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52e08ff8-698c-7e46-cf2a-6a44566aa637, 'name': SearchDatastore_Task, 'duration_secs': 0.015301} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.527553] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1105.527742] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1105.527983] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1105.528150] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.528343] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1105.528613] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-345bb7c8-65db-4c39-b443-61ea68bd7a87 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.536117] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1105.536295] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1105.536968] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-036443b4-6ea6-4e3c-a364-fcfd1381e8b5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.541472] env[61852]: DEBUG oslo_vmware.api [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Waiting for the task: (returnval){ [ 1105.541472] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5222b6d8-8304-50b1-3645-1404d77000d9" [ 1105.541472] env[61852]: _type = "Task" [ 1105.541472] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.549699] env[61852]: DEBUG oslo_vmware.api [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5222b6d8-8304-50b1-3645-1404d77000d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.052172] env[61852]: DEBUG oslo_vmware.api [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5222b6d8-8304-50b1-3645-1404d77000d9, 'name': SearchDatastore_Task, 'duration_secs': 0.007471} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.052883] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3eeff78d-47b5-4d1b-847d-480883686895 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.057947] env[61852]: DEBUG oslo_vmware.api [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Waiting for the task: (returnval){ [ 1106.057947] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]526cd1ca-d413-f899-de58-d5dfcddbb442" [ 1106.057947] env[61852]: _type = "Task" [ 1106.057947] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.065255] env[61852]: DEBUG oslo_vmware.api [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]526cd1ca-d413-f899-de58-d5dfcddbb442, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.414832] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2119423c-ddb6-4425-86a9-5bca1b9e5cf0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.426068] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeea873e-18f6-44a2-9639-d26a48033478 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.477583] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14a90a3c-ff02-45fc-b5bd-e5d6924cd888 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.488748] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e13b2f7-5948-4dc7-92db-dee0157317ab {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.510170] env[61852]: DEBUG nova.compute.provider_tree [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1106.571523] env[61852]: DEBUG oslo_vmware.api [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]526cd1ca-d413-f899-de58-d5dfcddbb442, 'name': SearchDatastore_Task, 'duration_secs': 0.009017} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.571886] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1106.572248] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 20eb193b-7104-4d3d-977d-577d3f048b7d/20eb193b-7104-4d3d-977d-577d3f048b7d.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1106.572585] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f5d50cab-f126-4540-aa50-9c5fad090ed8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.580829] env[61852]: DEBUG oslo_vmware.api [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Waiting for the task: (returnval){ [ 1106.580829] env[61852]: value = "task-1293426" [ 1106.580829] env[61852]: _type = "Task" [ 1106.580829] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.593703] env[61852]: DEBUG oslo_vmware.api [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Task: {'id': task-1293426, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.013899] env[61852]: DEBUG nova.scheduler.client.report [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1107.090929] env[61852]: DEBUG oslo_vmware.api [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Task: {'id': task-1293426, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.452781} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.091206] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 20eb193b-7104-4d3d-977d-577d3f048b7d/20eb193b-7104-4d3d-977d-577d3f048b7d.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1107.091426] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1107.091683] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-21bc59ab-ef1f-44e2-bd39-3f80c7cd265a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.098090] env[61852]: DEBUG oslo_vmware.api [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Waiting for the task: (returnval){ [ 1107.098090] env[61852]: value = "task-1293427" [ 1107.098090] env[61852]: _type = "Task" [ 1107.098090] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.105766] env[61852]: DEBUG oslo_vmware.api [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Task: {'id': task-1293427, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.518609] env[61852]: DEBUG oslo_concurrency.lockutils [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.173s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1107.519168] env[61852]: DEBUG nova.compute.manager [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1107.608754] env[61852]: DEBUG oslo_vmware.api [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Task: {'id': task-1293427, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06142} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.609113] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1107.610134] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97103343-f8da-451d-85dc-d8271eb46923 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.631436] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] 20eb193b-7104-4d3d-977d-577d3f048b7d/20eb193b-7104-4d3d-977d-577d3f048b7d.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1107.631729] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b30dc78-cf4a-405f-80b7-17e1d7419716 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.650582] env[61852]: DEBUG oslo_vmware.api [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Waiting for the task: (returnval){ [ 1107.650582] env[61852]: value = "task-1293428" [ 1107.650582] env[61852]: _type = "Task" [ 1107.650582] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.657960] env[61852]: DEBUG oslo_vmware.api [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Task: {'id': task-1293428, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.024063] env[61852]: DEBUG nova.compute.utils [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1108.025554] env[61852]: DEBUG nova.compute.manager [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1108.025730] env[61852]: DEBUG nova.network.neutron [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1108.075028] env[61852]: DEBUG nova.policy [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '81c41a76b275406c83c80068659e2b04', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3b019fd876c14428bd8f2de5fa66da4d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 1108.160834] env[61852]: DEBUG oslo_vmware.api [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Task: {'id': task-1293428, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.335511] env[61852]: DEBUG nova.network.neutron [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Successfully created port: 78d56179-c49e-4786-8486-fdd5d7717696 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1108.528579] env[61852]: DEBUG nova.compute.manager [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Start building block device mappings for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1108.660433] env[61852]: DEBUG oslo_vmware.api [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Task: {'id': task-1293428, 'name': ReconfigVM_Task, 'duration_secs': 0.5881} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.661896] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Reconfigured VM instance instance-00000068 to attach disk [datastore1] 20eb193b-7104-4d3d-977d-577d3f048b7d/20eb193b-7104-4d3d-977d-577d3f048b7d.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1108.661896] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6438a073-caa8-48a5-a737-0b70389484f8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.667648] env[61852]: DEBUG oslo_vmware.api [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Waiting for the task: (returnval){ [ 1108.667648] env[61852]: value = "task-1293429" [ 1108.667648] env[61852]: _type = "Task" [ 1108.667648] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.676720] env[61852]: DEBUG oslo_vmware.api [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Task: {'id': task-1293429, 'name': Rename_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.177368] env[61852]: DEBUG oslo_vmware.api [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Task: {'id': task-1293429, 'name': Rename_Task, 'duration_secs': 0.125614} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.177671] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1109.177926] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-81bb0716-546b-46cd-808d-59f8281efa98 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.184227] env[61852]: DEBUG oslo_vmware.api [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Waiting for the task: (returnval){ [ 1109.184227] env[61852]: value = "task-1293430" [ 1109.184227] env[61852]: _type = "Task" [ 1109.184227] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.192795] env[61852]: DEBUG oslo_vmware.api [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Task: {'id': task-1293430, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.538383] env[61852]: DEBUG nova.compute.manager [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1109.564016] env[61852]: DEBUG nova.virt.hardware [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1109.564298] env[61852]: DEBUG nova.virt.hardware [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1109.564484] env[61852]: DEBUG nova.virt.hardware [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1109.564678] env[61852]: DEBUG nova.virt.hardware [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1109.564832] env[61852]: DEBUG nova.virt.hardware [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1109.564985] env[61852]: DEBUG nova.virt.hardware [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1109.565215] env[61852]: DEBUG nova.virt.hardware [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1109.565379] env[61852]: DEBUG nova.virt.hardware [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1109.565548] env[61852]: DEBUG nova.virt.hardware [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1109.565712] env[61852]: DEBUG nova.virt.hardware [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1109.565887] env[61852]: DEBUG nova.virt.hardware [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1109.566744] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f078cb-967c-4800-8719-303047442df0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.574907] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4687c1a8-7231-45b8-abd7-866f95b650d3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.690623] env[61852]: DEBUG nova.compute.manager [req-24128f44-b5d5-4678-bc0f-c0a88954b64c req-8e9706d8-2da4-454a-9205-ea78a22ab8f6 service nova] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Received event network-vif-plugged-78d56179-c49e-4786-8486-fdd5d7717696 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1109.690901] env[61852]: DEBUG oslo_concurrency.lockutils [req-24128f44-b5d5-4678-bc0f-c0a88954b64c req-8e9706d8-2da4-454a-9205-ea78a22ab8f6 service nova] Acquiring lock "645184de-66ce-4b79-a7e3-84e0a0dfe330-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1109.690983] env[61852]: DEBUG oslo_concurrency.lockutils [req-24128f44-b5d5-4678-bc0f-c0a88954b64c req-8e9706d8-2da4-454a-9205-ea78a22ab8f6 service nova] Lock "645184de-66ce-4b79-a7e3-84e0a0dfe330-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1109.691171] env[61852]: DEBUG oslo_concurrency.lockutils [req-24128f44-b5d5-4678-bc0f-c0a88954b64c req-8e9706d8-2da4-454a-9205-ea78a22ab8f6 service nova] Lock "645184de-66ce-4b79-a7e3-84e0a0dfe330-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1109.691342] env[61852]: DEBUG nova.compute.manager [req-24128f44-b5d5-4678-bc0f-c0a88954b64c req-8e9706d8-2da4-454a-9205-ea78a22ab8f6 service nova] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] No waiting events found dispatching network-vif-plugged-78d56179-c49e-4786-8486-fdd5d7717696 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1109.691512] env[61852]: WARNING nova.compute.manager [req-24128f44-b5d5-4678-bc0f-c0a88954b64c req-8e9706d8-2da4-454a-9205-ea78a22ab8f6 service nova] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Received unexpected event network-vif-plugged-78d56179-c49e-4786-8486-fdd5d7717696 for instance with vm_state building and task_state spawning. [ 1109.697365] env[61852]: DEBUG oslo_vmware.api [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Task: {'id': task-1293430, 'name': PowerOnVM_Task, 'duration_secs': 0.394874} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.697610] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1109.697805] env[61852]: INFO nova.compute.manager [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Took 5.29 seconds to spawn the instance on the hypervisor. [ 1109.697983] env[61852]: DEBUG nova.compute.manager [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1109.698730] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40db731a-a440-46cb-bae0-441e6917a9ad {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.769216] env[61852]: DEBUG nova.network.neutron [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Successfully updated port: 78d56179-c49e-4786-8486-fdd5d7717696 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1110.214316] env[61852]: INFO nova.compute.manager [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Took 9.97 seconds to build instance. 
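The entry above closes the spawn sequence for instance 20eb193b-7104-4d3d-977d-577d3f048b7d: CreateVM_Task, a SearchDatastore_Task against the image cache, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task and finally PowerOnVM_Task, each driven by the same polling loop that produces the repeated "progress is N%" / "completed successfully" DEBUG lines. The following is a minimal Python sketch of that polling pattern only, not oslo.vmware's actual implementation; get_task_info and poll_interval are hypothetical names introduced for illustration.

import time

class TaskFailed(Exception):
    pass

def wait_for_vcenter_task(session, task_ref, poll_interval=0.5):
    """Poll a vCenter task until it reaches a terminal state.

    Mirrors the shape of the trace above: each iteration corresponds to
    one "_poll_task ... progress is N%" DEBUG line, and success returns
    the task result (e.g. the created VM moref for CreateVM_Task).
    """
    while True:
        info = session.get_task_info(task_ref)  # hypothetical accessor
        if info.state == 'success':
            return info.result
        if info.state == 'error':
            # Surfaced as a fault by the caller; not shown in this trace.
            raise TaskFailed(info.error)
        # 'queued' / 'running': wait and poll again.
        time.sleep(poll_interval)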
[ 1110.271477] env[61852]: DEBUG oslo_concurrency.lockutils [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "refresh_cache-645184de-66ce-4b79-a7e3-84e0a0dfe330" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1110.271638] env[61852]: DEBUG oslo_concurrency.lockutils [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquired lock "refresh_cache-645184de-66ce-4b79-a7e3-84e0a0dfe330" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.271767] env[61852]: DEBUG nova.network.neutron [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1110.716571] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1a1d45ad-b9b0-41b3-a81d-21fa242bbe59 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Lock "20eb193b-7104-4d3d-977d-577d3f048b7d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.480s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1110.803178] env[61852]: DEBUG nova.network.neutron [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1110.926041] env[61852]: DEBUG nova.network.neutron [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Updating instance_info_cache with network_info: [{"id": "78d56179-c49e-4786-8486-fdd5d7717696", "address": "fa:16:3e:c6:3c:de", "network": {"id": "84f117d3-1eaf-4f99-9240-7342ce499c83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1473985775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b019fd876c14428bd8f2de5fa66da4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78d56179-c4", "ovs_interfaceid": "78d56179-c49e-4786-8486-fdd5d7717696", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1111.158299] env[61852]: DEBUG nova.compute.manager [None req-842b9802-4267-47c8-b732-c8007eb09769 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1111.159203] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-991e24ee-6af3-421d-94b3-b7efe4275e87 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.220777] env[61852]: DEBUG oslo_concurrency.lockutils [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Acquiring lock "20eb193b-7104-4d3d-977d-577d3f048b7d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.220931] env[61852]: DEBUG oslo_concurrency.lockutils [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Lock "20eb193b-7104-4d3d-977d-577d3f048b7d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1111.221094] env[61852]: DEBUG oslo_concurrency.lockutils [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Acquiring lock "20eb193b-7104-4d3d-977d-577d3f048b7d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.221288] env[61852]: DEBUG oslo_concurrency.lockutils [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Lock "20eb193b-7104-4d3d-977d-577d3f048b7d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1111.221466] env[61852]: DEBUG oslo_concurrency.lockutils [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Lock "20eb193b-7104-4d3d-977d-577d3f048b7d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.223552] env[61852]: INFO nova.compute.manager [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Terminating instance [ 1111.225162] env[61852]: DEBUG oslo_concurrency.lockutils [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Acquiring lock "refresh_cache-20eb193b-7104-4d3d-977d-577d3f048b7d" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1111.225320] env[61852]: DEBUG oslo_concurrency.lockutils [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Acquired lock "refresh_cache-20eb193b-7104-4d3d-977d-577d3f048b7d" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.225489] env[61852]: DEBUG nova.network.neutron [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1111.428843] env[61852]: DEBUG oslo_concurrency.lockutils [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Releasing lock "refresh_cache-645184de-66ce-4b79-a7e3-84e0a0dfe330" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1111.429132] env[61852]: DEBUG nova.compute.manager [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Instance network_info: |[{"id": "78d56179-c49e-4786-8486-fdd5d7717696", "address": "fa:16:3e:c6:3c:de", "network": {"id": "84f117d3-1eaf-4f99-9240-7342ce499c83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1473985775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b019fd876c14428bd8f2de5fa66da4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78d56179-c4", "ovs_interfaceid": "78d56179-c49e-4786-8486-fdd5d7717696", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1111.429603] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:3c:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e23c1d18-c841-49ea-95f3-df5ceac28afd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '78d56179-c49e-4786-8486-fdd5d7717696', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1111.436859] env[61852]: DEBUG oslo.service.loopingcall [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1111.437080] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1111.437312] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0c61f1b3-9612-4b8b-ac71-ba255a9d22c4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.456290] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1111.456290] env[61852]: value = "task-1293431" [ 1111.456290] env[61852]: _type = "Task" [ 1111.456290] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.463492] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293431, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.671251] env[61852]: INFO nova.compute.manager [None req-842b9802-4267-47c8-b732-c8007eb09769 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] instance snapshotting [ 1111.671867] env[61852]: DEBUG nova.objects.instance [None req-842b9802-4267-47c8-b732-c8007eb09769 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Lazy-loading 'flavor' on Instance uuid 20eb193b-7104-4d3d-977d-577d3f048b7d {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1111.713863] env[61852]: DEBUG nova.compute.manager [req-9a9dfaf4-8308-4e58-8894-811c57aaa706 req-11297a58-f9a1-4ea7-86a4-e9afa89eb0f5 service nova] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Received event network-changed-78d56179-c49e-4786-8486-fdd5d7717696 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1111.714152] env[61852]: DEBUG nova.compute.manager [req-9a9dfaf4-8308-4e58-8894-811c57aaa706 req-11297a58-f9a1-4ea7-86a4-e9afa89eb0f5 service nova] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Refreshing instance network info cache due to event network-changed-78d56179-c49e-4786-8486-fdd5d7717696. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1111.714400] env[61852]: DEBUG oslo_concurrency.lockutils [req-9a9dfaf4-8308-4e58-8894-811c57aaa706 req-11297a58-f9a1-4ea7-86a4-e9afa89eb0f5 service nova] Acquiring lock "refresh_cache-645184de-66ce-4b79-a7e3-84e0a0dfe330" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1111.714584] env[61852]: DEBUG oslo_concurrency.lockutils [req-9a9dfaf4-8308-4e58-8894-811c57aaa706 req-11297a58-f9a1-4ea7-86a4-e9afa89eb0f5 service nova] Acquired lock "refresh_cache-645184de-66ce-4b79-a7e3-84e0a0dfe330" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.714784] env[61852]: DEBUG nova.network.neutron [req-9a9dfaf4-8308-4e58-8894-811c57aaa706 req-11297a58-f9a1-4ea7-86a4-e9afa89eb0f5 service nova] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Refreshing network info cache for port 78d56179-c49e-4786-8486-fdd5d7717696 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1111.741561] env[61852]: DEBUG nova.network.neutron [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1111.785775] env[61852]: DEBUG nova.network.neutron [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1111.966732] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293431, 'name': CreateVM_Task, 'duration_secs': 0.269118} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.966896] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1111.967583] env[61852]: DEBUG oslo_concurrency.lockutils [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1111.967753] env[61852]: DEBUG oslo_concurrency.lockutils [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.968112] env[61852]: DEBUG oslo_concurrency.lockutils [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1111.968397] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10ffeda2-688e-4b01-98fe-473a3ff0813f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.972922] env[61852]: DEBUG oslo_vmware.api [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1111.972922] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52a090d4-7e89-4d6a-40e0-84be98d28ec6" [ 1111.972922] env[61852]: _type = "Task" [ 1111.972922] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.980875] env[61852]: DEBUG oslo_vmware.api [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52a090d4-7e89-4d6a-40e0-84be98d28ec6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.179070] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cd87808-98ae-4af0-ade7-cf1c4cdb7fcf {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.195407] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb1ac22-e260-43d0-a84c-a70db417c6d1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.290854] env[61852]: DEBUG oslo_concurrency.lockutils [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Releasing lock "refresh_cache-20eb193b-7104-4d3d-977d-577d3f048b7d" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1112.291324] env[61852]: DEBUG nova.compute.manager [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1112.291527] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1112.294397] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-233b493f-e2c4-4417-b412-617a486c17a1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.303731] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1112.303976] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-21364151-2229-4f92-9028-6d8027e444b2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.309919] env[61852]: DEBUG oslo_vmware.api [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Waiting for the task: (returnval){ [ 1112.309919] env[61852]: value = "task-1293432" [ 1112.309919] env[61852]: _type = "Task" [ 1112.309919] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.317009] env[61852]: DEBUG oslo_vmware.api [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Task: {'id': task-1293432, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.409214] env[61852]: DEBUG nova.network.neutron [req-9a9dfaf4-8308-4e58-8894-811c57aaa706 req-11297a58-f9a1-4ea7-86a4-e9afa89eb0f5 service nova] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Updated VIF entry in instance network info cache for port 78d56179-c49e-4786-8486-fdd5d7717696. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1112.409632] env[61852]: DEBUG nova.network.neutron [req-9a9dfaf4-8308-4e58-8894-811c57aaa706 req-11297a58-f9a1-4ea7-86a4-e9afa89eb0f5 service nova] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Updating instance_info_cache with network_info: [{"id": "78d56179-c49e-4786-8486-fdd5d7717696", "address": "fa:16:3e:c6:3c:de", "network": {"id": "84f117d3-1eaf-4f99-9240-7342ce499c83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1473985775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b019fd876c14428bd8f2de5fa66da4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78d56179-c4", "ovs_interfaceid": "78d56179-c49e-4786-8486-fdd5d7717696", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1112.482327] env[61852]: DEBUG oslo_vmware.api [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52a090d4-7e89-4d6a-40e0-84be98d28ec6, 'name': SearchDatastore_Task, 'duration_secs': 0.008459} completed successfully. 
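The instance_info_cache payload above is a list of VIF dicts. A trimmed reader for the fields Nova prints here (real code wraps this structure in nova.network.model.NetworkInfo; the dict below keeps only a subset of the logged entry):

    network_info = [{
        'id': '78d56179-c49e-4786-8486-fdd5d7717696',
        'address': 'fa:16:3e:c6:3c:de',
        'devname': 'tap78d56179-c4',
        'network': {'subnets': [{'cidr': '192.168.128.0/28',
                                 'ips': [{'address': '192.168.128.8',
                                          'type': 'fixed',
                                          'floating_ips': []}]}]},
    }]

    for vif in network_info:
        for subnet in vif['network']['subnets']:
            for ip in subnet['ips']:
                print(vif['id'], vif['devname'], ip['address'], ip['type'])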
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.482710] env[61852]: DEBUG oslo_concurrency.lockutils [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1112.482955] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1112.483215] env[61852]: DEBUG oslo_concurrency.lockutils [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1112.483368] env[61852]: DEBUG oslo_concurrency.lockutils [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1112.483565] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1112.484120] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c56b939b-3c75-4ec4-8022-0fcf4cad5c36 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.490922] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1112.491114] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1112.491847] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d46f346-5f00-43ff-a1cc-ac344971c4e0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.496454] env[61852]: DEBUG oslo_vmware.api [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1112.496454] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5255bf9f-ca3c-a330-07cc-09d10ea665e4" [ 1112.496454] env[61852]: _type = "Task" [ 1112.496454] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.503654] env[61852]: DEBUG oslo_vmware.api [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5255bf9f-ca3c-a330-07cc-09d10ea665e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.705410] env[61852]: DEBUG nova.compute.manager [None req-842b9802-4267-47c8-b732-c8007eb09769 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Instance disappeared during snapshot {{(pid=61852) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4494}} [ 1112.819767] env[61852]: DEBUG oslo_vmware.api [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Task: {'id': task-1293432, 'name': PowerOffVM_Task, 'duration_secs': 0.17655} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.820065] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1112.820220] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1112.820490] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-09945be3-253b-48c5-9aff-ab5c833029cd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.827690] env[61852]: DEBUG nova.compute.manager [None req-842b9802-4267-47c8-b732-c8007eb09769 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Found 0 images (rotation: 2) {{(pid=61852) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4554}} [ 1112.843398] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1112.843641] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1112.843809] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Deleting the datastore file [datastore1] 20eb193b-7104-4d3d-977d-577d3f048b7d {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1112.844069] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7d38b67b-437b-42e2-a1aa-7dacde3e0836 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.851020] env[61852]: DEBUG oslo_vmware.api [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Waiting for the task: (returnval){ [ 1112.851020] env[61852]: value = "task-1293434" [ 1112.851020] env[61852]: _type = "Task" [ 1112.851020] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.858630] env[61852]: DEBUG oslo_vmware.api [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Task: {'id': task-1293434, 'name': DeleteDatastoreFile_Task} progress is 0%. 
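Tasks task-1293432 and task-1293434 above bracket the fixed teardown order the driver uses: power off, unregister the VM (which leaves its files behind), then delete the instance directory from the datastore. A sketch of that sequence only; session, vm_ref, file_manager, and dc_ref stand for the oslo.vmware session and managed-object references the driver already holds, and the keyword names are taken from the vSphere WSDL:

    def destroy_on_hypervisor(session, vm_ref, file_manager, dc_ref, ds_path):
        # 1. Power off (task-1293432 above).
        session.wait_for_task(
            session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref))
        # 2. UnregisterVM removes the VM from inventory without touching files.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
        # 3. Delete the instance directory (task-1293434 above).
        session.wait_for_task(
            session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                               file_manager, name=str(ds_path),
                               datacenter=dc_ref))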
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.912177] env[61852]: DEBUG oslo_concurrency.lockutils [req-9a9dfaf4-8308-4e58-8894-811c57aaa706 req-11297a58-f9a1-4ea7-86a4-e9afa89eb0f5 service nova] Releasing lock "refresh_cache-645184de-66ce-4b79-a7e3-84e0a0dfe330" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1113.006433] env[61852]: DEBUG oslo_vmware.api [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5255bf9f-ca3c-a330-07cc-09d10ea665e4, 'name': SearchDatastore_Task, 'duration_secs': 0.007869} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.007207] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9809487a-fc73-46af-b590-3f6a80877542 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.012288] env[61852]: DEBUG oslo_vmware.api [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1113.012288] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]522129ae-02e3-2200-508f-d7c8dd084df3" [ 1113.012288] env[61852]: _type = "Task" [ 1113.012288] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.019725] env[61852]: DEBUG oslo_vmware.api [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]522129ae-02e3-2200-508f-d7c8dd084df3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.360804] env[61852]: DEBUG oslo_vmware.api [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Task: {'id': task-1293434, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.166768} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.361079] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1113.361274] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1113.361455] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1113.361630] env[61852]: INFO nova.compute.manager [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Took 1.07 seconds to destroy the instance on the hypervisor. [ 1113.361901] env[61852]: DEBUG oslo.service.loopingcall [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1113.362112] env[61852]: DEBUG nova.compute.manager [-] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1113.362209] env[61852]: DEBUG nova.network.neutron [-] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1113.377400] env[61852]: DEBUG nova.network.neutron [-] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Instance cache missing network info. {{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1113.522347] env[61852]: DEBUG oslo_vmware.api [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]522129ae-02e3-2200-508f-d7c8dd084df3, 'name': SearchDatastore_Task, 'duration_secs': 0.031167} completed successfully. 
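The "Waiting for function ... _deallocate_network_with_retries to return" line is oslo.service's looping-call machinery: the wrapped function is re-invoked until it raises LoopingCallDone. A runnable sketch of that contract (Nova wraps the deallocation in a back-off variant; a fixed interval is used here for brevity):

    from oslo_service import loopingcall

    attempts = {'n': 0}

    def _deallocate_network_with_retries():   # stands in for Nova's inner func
        attempts['n'] += 1
        if attempts['n'] == 3:                # pretend the third try succeeds
            raise loopingcall.LoopingCallDone(True)

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_network_with_retries)
    print(timer.start(interval=0.1).wait())   # True, after three invocations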
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.522890] env[61852]: DEBUG oslo_concurrency.lockutils [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1113.522890] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 645184de-66ce-4b79-a7e3-84e0a0dfe330/645184de-66ce-4b79-a7e3-84e0a0dfe330.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1113.523101] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5a752ad8-4a4d-4d55-8bd1-ad6063021452 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.529809] env[61852]: DEBUG oslo_vmware.api [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1113.529809] env[61852]: value = "task-1293435" [ 1113.529809] env[61852]: _type = "Task" [ 1113.529809] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.537369] env[61852]: DEBUG oslo_vmware.api [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293435, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.880156] env[61852]: DEBUG nova.network.neutron [-] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1114.038848] env[61852]: DEBUG oslo_vmware.api [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293435, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.458997} completed successfully. 
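task-1293435 above is the image cache paying off: instead of re-fetching the image from Glance, the driver copies the cached VMDK into the new instance's directory. A sketch of that call (keyword names follow the CopyVirtualDisk_Task WSDL signature; vd_manager is the VirtualDiskManager managed object):

    def copy_cached_disk(session, vd_manager, dc_ref, src_vmdk, dst_vmdk):
        # Server-side datastore-to-datastore copy; returns a Task to poll.
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task',
                                  vd_manager,
                                  sourceName=src_vmdk, sourceDatacenter=dc_ref,
                                  destName=dst_vmdk, destDatacenter=dc_ref)
        session.wait_for_task(task)   # 0.458997 s in the run above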
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.039132] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 645184de-66ce-4b79-a7e3-84e0a0dfe330/645184de-66ce-4b79-a7e3-84e0a0dfe330.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1114.039370] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1114.039628] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-971f0dd4-6788-48ce-96bd-0cccf27d6ce0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.047140] env[61852]: DEBUG oslo_vmware.api [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1114.047140] env[61852]: value = "task-1293436" [ 1114.047140] env[61852]: _type = "Task" [ 1114.047140] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.053798] env[61852]: DEBUG oslo_vmware.api [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293436, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.383325] env[61852]: INFO nova.compute.manager [-] [instance: 20eb193b-7104-4d3d-977d-577d3f048b7d] Took 1.02 seconds to deallocate network for instance. [ 1114.557261] env[61852]: DEBUG oslo_vmware.api [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293436, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061186} completed successfully. 
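The "Extending root virtual disk to 1048576" figure in task-1293436 is in kilobytes: the flavor's root disk expressed in the unit the extend call expects. The arithmetic, assuming the 1 GiB root disk these tempest flavors use:

    root_gb = 1                           # flavor root disk size (assumed)
    new_capacity_kb = root_gb * 1024 * 1024
    assert new_capacity_kb == 1048576     # the value logged above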
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.557579] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1114.558377] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a11a7177-b44f-4a5d-89d1-c569b6dc8924 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.579694] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] 645184de-66ce-4b79-a7e3-84e0a0dfe330/645184de-66ce-4b79-a7e3-84e0a0dfe330.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1114.579957] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2db600d2-7047-4c71-94fc-2c923a6fa91f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.598651] env[61852]: DEBUG oslo_vmware.api [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1114.598651] env[61852]: value = "task-1293437" [ 1114.598651] env[61852]: _type = "Task" [ 1114.598651] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.605803] env[61852]: DEBUG oslo_vmware.api [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293437, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.889960] env[61852]: DEBUG oslo_concurrency.lockutils [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1114.890470] env[61852]: DEBUG oslo_concurrency.lockutils [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.890470] env[61852]: DEBUG nova.objects.instance [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Lazy-loading 'resources' on Instance uuid 20eb193b-7104-4d3d-977d-577d3f048b7d {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1115.108787] env[61852]: DEBUG oslo_vmware.api [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293437, 'name': ReconfigVM_Task, 'duration_secs': 0.252468} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.109073] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Reconfigured VM instance instance-00000069 to attach disk [datastore1] 645184de-66ce-4b79-a7e3-84e0a0dfe330/645184de-66ce-4b79-a7e3-84e0a0dfe330.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1115.109687] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0dad61a5-9678-488f-8f01-4bc49ca03317 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.115598] env[61852]: DEBUG oslo_vmware.api [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1115.115598] env[61852]: value = "task-1293438" [ 1115.115598] env[61852]: _type = "Task" [ 1115.115598] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.122576] env[61852]: DEBUG oslo_vmware.api [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293438, 'name': Rename_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.452396] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55949e22-4c97-4bf6-9060-615bc81e90c7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.459709] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ea0069-4a49-48d0-b37b-2d38a68c4ebc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.489930] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8da1854e-14f5-431c-8f2d-704645d487be {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.496524] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bac4312-fb0d-4454-ab0c-8d4d45ff29ed {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.508954] env[61852]: DEBUG nova.compute.provider_tree [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1115.624579] env[61852]: DEBUG oslo_vmware.api [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293438, 'name': Rename_Task, 'duration_secs': 0.158384} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.624871] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1115.625145] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2c9bcb73-9d64-483e-b758-04d5fbcf8e5a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.631157] env[61852]: DEBUG oslo_vmware.api [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1115.631157] env[61852]: value = "task-1293439" [ 1115.631157] env[61852]: _type = "Task" [ 1115.631157] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.638271] env[61852]: DEBUG oslo_vmware.api [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293439, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.012357] env[61852]: DEBUG nova.scheduler.client.report [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1116.140944] env[61852]: DEBUG oslo_vmware.api [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293439, 'name': PowerOnVM_Task, 'duration_secs': 0.426222} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.141234] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1116.141440] env[61852]: INFO nova.compute.manager [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Took 6.60 seconds to spawn the instance on the hypervisor. [ 1116.141622] env[61852]: DEBUG nova.compute.manager [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1116.142372] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5db38d3c-51da-4003-ae80-e13cfdc8bb4a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.517110] env[61852]: DEBUG oslo_concurrency.lockutils [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.627s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1116.535076] env[61852]: INFO nova.scheduler.client.report [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Deleted allocations for instance 20eb193b-7104-4d3d-977d-577d3f048b7d [ 1116.658302] env[61852]: INFO nova.compute.manager [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Took 13.52 seconds to build instance. 
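The inventory dict the scheduler report client logs above maps directly onto placement's per-resource-class capacity model: what can be scheduled is (total - reserved) * allocation_ratio. A quick check against the logged numbers:

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    def schedulable(rc):
        inv = inventory[rc]
        return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

    print(schedulable('VCPU'))       # 192.0 vCPUs from 48 physical at ratio 4.0
    print(schedulable('MEMORY_MB'))  # 196078.0 MB
    print(schedulable('DISK_GB'))    # 400.0 GB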
[ 1117.043282] env[61852]: DEBUG oslo_concurrency.lockutils [None req-05aa9513-bca3-4cbb-920b-e4aafcaeb896 tempest-ServersAaction247Test-712321824 tempest-ServersAaction247Test-712321824-project-member] Lock "20eb193b-7104-4d3d-977d-577d3f048b7d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.822s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1117.160315] env[61852]: DEBUG oslo_concurrency.lockutils [None req-84cb1b96-e2ac-4884-8f80-9c70fb5e8142 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "645184de-66ce-4b79-a7e3-84e0a0dfe330" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.033s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1117.504785] env[61852]: DEBUG nova.compute.manager [req-f15e28e4-ebba-4797-b408-d352d75baa6a req-2dac6039-d92f-4f9f-94d6-b701d3da519a service nova] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Received event network-changed-78d56179-c49e-4786-8486-fdd5d7717696 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1117.505103] env[61852]: DEBUG nova.compute.manager [req-f15e28e4-ebba-4797-b408-d352d75baa6a req-2dac6039-d92f-4f9f-94d6-b701d3da519a service nova] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Refreshing instance network info cache due to event network-changed-78d56179-c49e-4786-8486-fdd5d7717696. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1117.505436] env[61852]: DEBUG oslo_concurrency.lockutils [req-f15e28e4-ebba-4797-b408-d352d75baa6a req-2dac6039-d92f-4f9f-94d6-b701d3da519a service nova] Acquiring lock "refresh_cache-645184de-66ce-4b79-a7e3-84e0a0dfe330" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1117.505668] env[61852]: DEBUG oslo_concurrency.lockutils [req-f15e28e4-ebba-4797-b408-d352d75baa6a req-2dac6039-d92f-4f9f-94d6-b701d3da519a service nova] Acquired lock "refresh_cache-645184de-66ce-4b79-a7e3-84e0a0dfe330" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.505923] env[61852]: DEBUG nova.network.neutron [req-f15e28e4-ebba-4797-b408-d352d75baa6a req-2dac6039-d92f-4f9f-94d6-b701d3da519a service nova] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Refreshing network info cache for port 78d56179-c49e-4786-8486-fdd5d7717696 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1118.221737] env[61852]: DEBUG nova.network.neutron [req-f15e28e4-ebba-4797-b408-d352d75baa6a req-2dac6039-d92f-4f9f-94d6-b701d3da519a service nova] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Updated VIF entry in instance network info cache for port 78d56179-c49e-4786-8486-fdd5d7717696. 
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1118.222148] env[61852]: DEBUG nova.network.neutron [req-f15e28e4-ebba-4797-b408-d352d75baa6a req-2dac6039-d92f-4f9f-94d6-b701d3da519a service nova] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Updating instance_info_cache with network_info: [{"id": "78d56179-c49e-4786-8486-fdd5d7717696", "address": "fa:16:3e:c6:3c:de", "network": {"id": "84f117d3-1eaf-4f99-9240-7342ce499c83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1473985775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.166", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b019fd876c14428bd8f2de5fa66da4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78d56179-c4", "ovs_interfaceid": "78d56179-c49e-4786-8486-fdd5d7717696", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1118.725074] env[61852]: DEBUG oslo_concurrency.lockutils [req-f15e28e4-ebba-4797-b408-d352d75baa6a req-2dac6039-d92f-4f9f-94d6-b701d3da519a service nova] Releasing lock "refresh_cache-645184de-66ce-4b79-a7e3-84e0a0dfe330" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1131.921190] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.425479] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Getting list of instances from cluster (obj){ [ 1132.425479] env[61852]: value = "domain-c8" [ 1132.425479] env[61852]: _type = "ClusterComputeResource" [ 1132.425479] env[61852]: } {{(pid=61852) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1132.426564] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-057ae897-39b4-4e9a-bea9-352e37692781 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.438280] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Got total of 3 instances {{(pid=61852) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1132.438445] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Triggering sync for uuid d8baa4c3-7da1-450c-8bef-336fbb34ceab {{(pid=61852) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1132.438636] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] 
Triggering sync for uuid ad917577-5285-4f8d-8096-d83424deba33 {{(pid=61852) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1132.438794] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Triggering sync for uuid 645184de-66ce-4b79-a7e3-84e0a0dfe330 {{(pid=61852) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1132.439111] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "d8baa4c3-7da1-450c-8bef-336fbb34ceab" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.439333] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "d8baa4c3-7da1-450c-8bef-336fbb34ceab" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.439596] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "ad917577-5285-4f8d-8096-d83424deba33" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.439783] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "ad917577-5285-4f8d-8096-d83424deba33" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.440025] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "645184de-66ce-4b79-a7e3-84e0a0dfe330" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.440212] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "645184de-66ce-4b79-a7e3-84e0a0dfe330" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.441017] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2010e820-6006-41c1-88c1-e7976c8ccee2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.444252] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b0554dc-7253-4841-9d60-cf6935e85f33 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.447008] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-754be200-af64-432e-a632-108474569842 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1132.954599] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "d8baa4c3-7da1-450c-8bef-336fbb34ceab" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.515s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1132.959232] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "645184de-66ce-4b79-a7e3-84e0a0dfe330" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.519s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1132.959548] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "ad917577-5285-4f8d-8096-d83424deba33" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.520s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1134.157602] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1134.157900] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1134.158075] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
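_sync_power_states, _check_instance_build_time, _reclaim_queued_deletes and the rest of the "Running periodic task" lines are oslo.service periodic tasks: decorated methods on a PeriodicTasks subclass that the service loop fires on each task's own spacing. A structural sketch (the spacing values are assumptions; the real intervals come from nova.conf):

    from oslo_service import periodic_task

    class FakeComputeManager(periodic_task.PeriodicTasks):
        @periodic_task.periodic_task(spacing=600)
        def _sync_power_states(self, context):
            pass              # compare DB power state to the hypervisor's

        @periodic_task.periodic_task(spacing=300)
        def _reclaim_queued_deletes(self, context):
            pass              # skipped when reclaim_instance_interval <= 0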
{{(pid=61852) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1135.157488] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1135.157488] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1136.157626] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1136.157931] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1136.179642] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a17c9ad7-e91e-4423-b865-b9dc149eb71c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquiring lock "d8baa4c3-7da1-450c-8bef-336fbb34ceab" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.179899] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a17c9ad7-e91e-4423-b865-b9dc149eb71c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d8baa4c3-7da1-450c-8bef-336fbb34ceab" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.661018] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.661315] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.661545] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.661748] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61852) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1136.662713] 
env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba67c740-a9a9-41d9-b481-c3e9f63145f8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.670790] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88bf0a06-6b27-455d-af88-629ac1f2e8b9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.683618] env[61852]: INFO nova.compute.manager [None req-a17c9ad7-e91e-4423-b865-b9dc149eb71c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Detaching volume bbeb7c67-805f-43f5-a6d8-ef1012fa19e6 [ 1136.685925] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7538be13-52cf-4781-a78a-69022c6c7557 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.692433] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-478e2fef-e400-4254-bf72-a9647268227b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.721748] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181130MB free_disk=139GB free_vcpus=48 pci_devices=None {{(pid=61852) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1136.721896] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.722105] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.734263] env[61852]: INFO nova.virt.block_device [None req-a17c9ad7-e91e-4423-b865-b9dc149eb71c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Attempting to driver detach volume bbeb7c67-805f-43f5-a6d8-ef1012fa19e6 from mountpoint /dev/sdb [ 1136.734553] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-a17c9ad7-e91e-4423-b865-b9dc149eb71c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Volume detach. 
Driver type: vmdk {{(pid=61852) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1136.734754] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-a17c9ad7-e91e-4423-b865-b9dc149eb71c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-277433', 'volume_id': 'bbeb7c67-805f-43f5-a6d8-ef1012fa19e6', 'name': 'volume-bbeb7c67-805f-43f5-a6d8-ef1012fa19e6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd8baa4c3-7da1-450c-8bef-336fbb34ceab', 'attached_at': '', 'detached_at': '', 'volume_id': 'bbeb7c67-805f-43f5-a6d8-ef1012fa19e6', 'serial': 'bbeb7c67-805f-43f5-a6d8-ef1012fa19e6'} {{(pid=61852) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1136.735594] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b5160ae-7e8c-4b6a-982f-75f16f773eb9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.756976] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5831c7c7-2228-4cd9-9399-6c30067f8319 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.763699] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30416bfa-d6eb-4408-b753-8ec315638dc0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.784688] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1911eaa3-e2c4-4ccf-9ceb-b9e87d711568 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.797931] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-a17c9ad7-e91e-4423-b865-b9dc149eb71c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] The volume has not been displaced from its original location: [datastore1] volume-bbeb7c67-805f-43f5-a6d8-ef1012fa19e6/volume-bbeb7c67-805f-43f5-a6d8-ef1012fa19e6.vmdk. No consolidation needed. 
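The dict passed to _detach_volume_vmdk above is the Cinder connection_info for a VMDK-backed volume; data['volume'] is the managed-object reference of the backing ("shadow") VM that owns the disk. The fields the detach path reads, trimmed from the logged record:

    connection_info = {
        'driver_volume_type': 'vmdk',
        'data': {
            'volume': 'vm-277433',   # moref of the backing ("shadow") VM
            'volume_id': 'bbeb7c67-805f-43f5-a6d8-ef1012fa19e6',
            'name': 'volume-bbeb7c67-805f-43f5-a6d8-ef1012fa19e6',
            'access_mode': 'rw',
        },
    }

    data = connection_info['data']
    print(data['volume'], data['volume_id'], data['access_mode'])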
{{(pid=61852) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1136.802989] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-a17c9ad7-e91e-4423-b865-b9dc149eb71c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Reconfiguring VM instance instance-00000065 to detach disk 2001 {{(pid=61852) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1136.803448] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-283d2f08-7572-413c-8262-549667c6e128 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.820700] env[61852]: DEBUG oslo_vmware.api [None req-a17c9ad7-e91e-4423-b865-b9dc149eb71c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1136.820700] env[61852]: value = "task-1293440" [ 1136.820700] env[61852]: _type = "Task" [ 1136.820700] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.829568] env[61852]: DEBUG oslo_vmware.api [None req-a17c9ad7-e91e-4423-b865-b9dc149eb71c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293440, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.330029] env[61852]: DEBUG oslo_vmware.api [None req-a17c9ad7-e91e-4423-b865-b9dc149eb71c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293440, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.747268] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance d8baa4c3-7da1-450c-8bef-336fbb34ceab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1137.747473] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance ad917577-5285-4f8d-8096-d83424deba33 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1137.747602] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 645184de-66ce-4b79-a7e3-84e0a0dfe330 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1137.747778] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=61852) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1137.747917] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=61852) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1137.792891] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c28ef384-3716-4fef-86d5-23b07d6ed97b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.799974] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-341b0c70-ba64-45f3-b6fd-35bcf4e806f8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.830779] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e04f17ea-50f1-44dd-a73b-b08799678f3c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.837629] env[61852]: DEBUG oslo_vmware.api [None req-a17c9ad7-e91e-4423-b865-b9dc149eb71c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293440, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.840432] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cff8c01-4b27-4cac-be08-86e80b299795 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.852779] env[61852]: DEBUG nova.compute.provider_tree [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1138.337333] env[61852]: DEBUG oslo_vmware.api [None req-a17c9ad7-e91e-4423-b865-b9dc149eb71c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293440, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.355377] env[61852]: DEBUG nova.scheduler.client.report [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1138.838575] env[61852]: DEBUG oslo_vmware.api [None req-a17c9ad7-e91e-4423-b865-b9dc149eb71c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293440, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.859415] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61852) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1138.859598] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.137s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1139.338726] env[61852]: DEBUG oslo_vmware.api [None req-a17c9ad7-e91e-4423-b865-b9dc149eb71c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293440, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.839672] env[61852]: DEBUG oslo_vmware.api [None req-a17c9ad7-e91e-4423-b865-b9dc149eb71c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293440, 'name': ReconfigVM_Task, 'duration_secs': 2.927579} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.839966] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-a17c9ad7-e91e-4423-b865-b9dc149eb71c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Reconfigured VM instance instance-00000065 to detach disk 2001 {{(pid=61852) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1139.844507] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-91e49941-ed8f-447c-a5a7-68d632e138c1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.858393] env[61852]: DEBUG oslo_vmware.api [None req-a17c9ad7-e91e-4423-b865-b9dc149eb71c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1139.858393] env[61852]: value = "task-1293441" [ 1139.858393] env[61852]: _type = "Task" [ 1139.858393] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.858772] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1139.858914] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Starting heal instance info cache {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1139.867334] env[61852]: DEBUG oslo_vmware.api [None req-a17c9ad7-e91e-4423-b865-b9dc149eb71c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293441, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.369074] env[61852]: DEBUG oslo_vmware.api [None req-a17c9ad7-e91e-4423-b865-b9dc149eb71c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293441, 'name': ReconfigVM_Task, 'duration_secs': 0.129139} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.369674] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-a17c9ad7-e91e-4423-b865-b9dc149eb71c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-277433', 'volume_id': 'bbeb7c67-805f-43f5-a6d8-ef1012fa19e6', 'name': 'volume-bbeb7c67-805f-43f5-a6d8-ef1012fa19e6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd8baa4c3-7da1-450c-8bef-336fbb34ceab', 'attached_at': '', 'detached_at': '', 'volume_id': 'bbeb7c67-805f-43f5-a6d8-ef1012fa19e6', 'serial': 'bbeb7c67-805f-43f5-a6d8-ef1012fa19e6'} {{(pid=61852) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1140.418190] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "refresh_cache-ad917577-5285-4f8d-8096-d83424deba33" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1140.418348] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquired lock "refresh_cache-ad917577-5285-4f8d-8096-d83424deba33" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.418501] env[61852]: DEBUG nova.network.neutron [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: ad917577-5285-4f8d-8096-d83424deba33] Forcefully refreshing network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1140.909534] env[61852]: DEBUG nova.objects.instance [None req-a17c9ad7-e91e-4423-b865-b9dc149eb71c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lazy-loading 'flavor' on Instance uuid d8baa4c3-7da1-450c-8bef-336fbb34ceab {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1141.639968] env[61852]: DEBUG nova.network.neutron [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: ad917577-5285-4f8d-8096-d83424deba33] Updating instance_info_cache with network_info: [{"id": "749e06f0-8fbc-42b0-bbf4-95d75f6733d1", "address": "fa:16:3e:61:5f:8e", "network": {"id": "84f117d3-1eaf-4f99-9240-7342ce499c83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1473985775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b019fd876c14428bd8f2de5fa66da4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap749e06f0-8f", "ovs_interfaceid": "749e06f0-8fbc-42b0-bbf4-95d75f6733d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.917248] env[61852]: DEBUG oslo_concurrency.lockutils [None req-a17c9ad7-e91e-4423-b865-b9dc149eb71c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d8baa4c3-7da1-450c-8bef-336fbb34ceab" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 5.737s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1142.142227] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Releasing lock "refresh_cache-ad917577-5285-4f8d-8096-d83424deba33" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1142.142439] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: ad917577-5285-4f8d-8096-d83424deba33] Updated the network info_cache for instance {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1142.142647] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1142.142869] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1142.442310] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0f493254-d3a2-40c4-97a8-7c15ad458b7a tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquiring lock "d8baa4c3-7da1-450c-8bef-336fbb34ceab" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1142.442530] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0f493254-d3a2-40c4-97a8-7c15ad458b7a tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d8baa4c3-7da1-450c-8bef-336fbb34ceab" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1142.442713] env[61852]: DEBUG nova.compute.manager [None req-0f493254-d3a2-40c4-97a8-7c15ad458b7a tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1142.443629] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc75a7ec-f6ff-4174-872e-56858181da57 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.450469] env[61852]: DEBUG nova.compute.manager [None req-0f493254-d3a2-40c4-97a8-7c15ad458b7a tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] 
Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61852) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 1142.451054] env[61852]: DEBUG nova.objects.instance [None req-0f493254-d3a2-40c4-97a8-7c15ad458b7a tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lazy-loading 'flavor' on Instance uuid d8baa4c3-7da1-450c-8bef-336fbb34ceab {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1142.955866] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f493254-d3a2-40c4-97a8-7c15ad458b7a tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1142.956225] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b4788846-dfea-4f91-a683-967fef467551 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.962742] env[61852]: DEBUG oslo_vmware.api [None req-0f493254-d3a2-40c4-97a8-7c15ad458b7a tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1142.962742] env[61852]: value = "task-1293442" [ 1142.962742] env[61852]: _type = "Task" [ 1142.962742] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.971881] env[61852]: DEBUG oslo_vmware.api [None req-0f493254-d3a2-40c4-97a8-7c15ad458b7a tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293442, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.472609] env[61852]: DEBUG oslo_vmware.api [None req-0f493254-d3a2-40c4-97a8-7c15ad458b7a tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293442, 'name': PowerOffVM_Task, 'duration_secs': 0.193802} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.472890] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f493254-d3a2-40c4-97a8-7c15ad458b7a tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1143.473089] env[61852]: DEBUG nova.compute.manager [None req-0f493254-d3a2-40c4-97a8-7c15ad458b7a tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1143.473828] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00840770-1087-479b-958d-f17cd5a73132 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.984138] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0f493254-d3a2-40c4-97a8-7c15ad458b7a tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d8baa4c3-7da1-450c-8bef-336fbb34ceab" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.541s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1144.824877] env[61852]: DEBUG nova.objects.instance [None req-e115e0d0-e4e8-4127-9aea-8d0b14b350e9 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lazy-loading 'flavor' on Instance uuid d8baa4c3-7da1-450c-8bef-336fbb34ceab {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1145.330168] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e115e0d0-e4e8-4127-9aea-8d0b14b350e9 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquiring lock "refresh_cache-d8baa4c3-7da1-450c-8bef-336fbb34ceab" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1145.330493] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e115e0d0-e4e8-4127-9aea-8d0b14b350e9 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquired lock "refresh_cache-d8baa4c3-7da1-450c-8bef-336fbb34ceab" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1145.330493] env[61852]: DEBUG nova.network.neutron [None req-e115e0d0-e4e8-4127-9aea-8d0b14b350e9 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1145.330674] env[61852]: DEBUG nova.objects.instance [None req-e115e0d0-e4e8-4127-9aea-8d0b14b350e9 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lazy-loading 'info_cache' on Instance uuid d8baa4c3-7da1-450c-8bef-336fbb34ceab {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1145.834165] env[61852]: DEBUG nova.objects.base [None req-e115e0d0-e4e8-4127-9aea-8d0b14b350e9 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Object Instance lazy-loaded 
attributes: flavor,info_cache {{(pid=61852) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1146.539753] env[61852]: DEBUG nova.network.neutron [None req-e115e0d0-e4e8-4127-9aea-8d0b14b350e9 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Updating instance_info_cache with network_info: [{"id": "537951d4-2e0a-45fd-a9eb-39ddf930b39d", "address": "fa:16:3e:bd:ee:7f", "network": {"id": "07c444d7-03d4-406b-bb66-de44a92b43d6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-818710190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a10be4b0f16c432c87b39b211fbf2fee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0c293d47-74c0-49d7-a474-cdb643080f6f", "external-id": "nsx-vlan-transportzone-172", "segmentation_id": 172, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap537951d4-2e", "ovs_interfaceid": "537951d4-2e0a-45fd-a9eb-39ddf930b39d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1147.042764] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e115e0d0-e4e8-4127-9aea-8d0b14b350e9 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Releasing lock "refresh_cache-d8baa4c3-7da1-450c-8bef-336fbb34ceab" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1147.546734] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-e115e0d0-e4e8-4127-9aea-8d0b14b350e9 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1147.547121] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb428964-dfbd-4cee-8633-867c9a3e43b2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.554585] env[61852]: DEBUG oslo_vmware.api [None req-e115e0d0-e4e8-4127-9aea-8d0b14b350e9 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1147.554585] env[61852]: value = "task-1293443" [ 1147.554585] env[61852]: _type = "Task" [ 1147.554585] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.561670] env[61852]: DEBUG oslo_vmware.api [None req-e115e0d0-e4e8-4127-9aea-8d0b14b350e9 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293443, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.064896] env[61852]: DEBUG oslo_vmware.api [None req-e115e0d0-e4e8-4127-9aea-8d0b14b350e9 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293443, 'name': PowerOnVM_Task, 'duration_secs': 0.367053} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.065174] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-e115e0d0-e4e8-4127-9aea-8d0b14b350e9 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1148.065367] env[61852]: DEBUG nova.compute.manager [None req-e115e0d0-e4e8-4127-9aea-8d0b14b350e9 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1148.066117] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8f4db4f-708f-402b-abc5-76f20efffa82 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.512777] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "645184de-66ce-4b79-a7e3-84e0a0dfe330" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1155.513164] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "645184de-66ce-4b79-a7e3-84e0a0dfe330" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1155.513305] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "645184de-66ce-4b79-a7e3-84e0a0dfe330-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1155.513498] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "645184de-66ce-4b79-a7e3-84e0a0dfe330-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1155.513675] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock 
"645184de-66ce-4b79-a7e3-84e0a0dfe330-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1155.516735] env[61852]: INFO nova.compute.manager [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Terminating instance [ 1155.518629] env[61852]: DEBUG nova.compute.manager [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1155.518850] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1155.519700] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48b524e7-d84a-4a82-a985-36101f91cb0b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.527467] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1155.527964] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-348d2b39-400e-4242-8ce9-cc6ad1e3d50f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.533711] env[61852]: DEBUG oslo_vmware.api [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1155.533711] env[61852]: value = "task-1293444" [ 1155.533711] env[61852]: _type = "Task" [ 1155.533711] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.541530] env[61852]: DEBUG oslo_vmware.api [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293444, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.043531] env[61852]: DEBUG oslo_vmware.api [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293444, 'name': PowerOffVM_Task, 'duration_secs': 0.164345} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.043531] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1156.043764] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1156.043899] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0f6509b7-735e-48e2-b807-c0f4378d186b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.099482] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1156.099744] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1156.099996] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Deleting the datastore file [datastore1] 645184de-66ce-4b79-a7e3-84e0a0dfe330 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1156.100307] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4ae1f7c4-be30-4981-9f38-ef023eb47877 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.107266] env[61852]: DEBUG oslo_vmware.api [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1156.107266] env[61852]: value = "task-1293446" [ 1156.107266] env[61852]: _type = "Task" [ 1156.107266] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.616872] env[61852]: DEBUG oslo_vmware.api [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293446, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140496} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.617251] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1156.617355] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1156.617537] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1156.617733] env[61852]: INFO nova.compute.manager [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1156.617980] env[61852]: DEBUG oslo.service.loopingcall [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1156.618196] env[61852]: DEBUG nova.compute.manager [-] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1156.618293] env[61852]: DEBUG nova.network.neutron [-] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1157.088259] env[61852]: DEBUG nova.compute.manager [req-c0e3d89b-5707-4cad-8e78-031dc59ed949 req-0ccd5b9c-7b1c-4442-b9b8-8ca31354c459 service nova] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Received event network-vif-deleted-78d56179-c49e-4786-8486-fdd5d7717696 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1157.088259] env[61852]: INFO nova.compute.manager [req-c0e3d89b-5707-4cad-8e78-031dc59ed949 req-0ccd5b9c-7b1c-4442-b9b8-8ca31354c459 service nova] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Neutron deleted interface 78d56179-c49e-4786-8486-fdd5d7717696; detaching it from the instance and deleting it from the info cache [ 1157.088259] env[61852]: DEBUG nova.network.neutron [req-c0e3d89b-5707-4cad-8e78-031dc59ed949 req-0ccd5b9c-7b1c-4442-b9b8-8ca31354c459 service nova] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1157.536053] env[61852]: DEBUG nova.network.neutron [-] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1157.590987] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-832261bf-416f-429d-b951-e16758853a1f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.600644] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9910262-6bbd-46c2-a5ba-241800badf8c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.628345] env[61852]: DEBUG nova.compute.manager [req-c0e3d89b-5707-4cad-8e78-031dc59ed949 req-0ccd5b9c-7b1c-4442-b9b8-8ca31354c459 service nova] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Detach interface failed, port_id=78d56179-c49e-4786-8486-fdd5d7717696, reason: Instance 645184de-66ce-4b79-a7e3-84e0a0dfe330 could not be found. {{(pid=61852) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1158.038680] env[61852]: INFO nova.compute.manager [-] [instance: 645184de-66ce-4b79-a7e3-84e0a0dfe330] Took 1.42 seconds to deallocate network for instance. 
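The terminate sequence above ends with oslo.service's looping-call machinery: the record "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return" (sourced from oslo_service/loopingcall.py) shows network deallocation being driven through a looping call that re-invokes the wrapped function until it signals completion. Below is a minimal standalone sketch of that pattern, assuming oslo.service is importable; it uses FixedIntervalLoopingCall as a representative looping call (the log does not show which exact class Nova routes this retry through), and TransientNeutronError, _pretend_deallocate_network, and the retry budget are hypothetical stand-ins, not Nova's actual code.

    from oslo_service import loopingcall


    class TransientNeutronError(Exception):
        """Hypothetical stand-in for a retryable Neutron failure."""


    _attempts = {"count": 0}


    def _pretend_deallocate_network():
        # Fails twice, then succeeds, so the sketch exercises the retry path.
        _attempts["count"] += 1
        if _attempts["count"] < 3:
            raise TransientNeutronError("transient port-delete failure")


    def _deallocate_network_with_retries():
        try:
            _pretend_deallocate_network()
        except TransientNeutronError:
            if _attempts["count"] >= 5:
                raise  # retry budget exhausted: let the caller see the error
            return     # a plain return means "not done yet, call me again"
        # Raising LoopingCallDone stops the loop; its retvalue is what the
        # caller's wait() returns.
        raise loopingcall.LoopingCallDone(retvalue=True)


    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_network_with_retries)
    assert timer.start(interval=0.1).wait() is True

The design visible in the log follows from this shape: the compute manager blocks on wait() ("Waiting for function ... to return") while the looping call absorbs transient Neutron errors, which is why the instance's "Deallocating network" record is immediately followed by event-driven cache updates rather than an error.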
[ 1158.545243] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1158.545502] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1158.545737] env[61852]: DEBUG nova.objects.instance [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lazy-loading 'resources' on Instance uuid 645184de-66ce-4b79-a7e3-84e0a0dfe330 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1159.096702] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b84897a-f8fe-4df1-9ea1-751085ff065c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.103626] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e5534b6-3f26-40b7-839c-3c76600a67bd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.132184] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c88c217-cabc-4dcf-9251-79cd3937c643 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.138587] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a16e79-9d93-45ad-b72d-822fcfadaa93 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.151130] env[61852]: DEBUG nova.compute.provider_tree [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1159.654011] env[61852]: DEBUG nova.scheduler.client.report [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1160.159843] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef 
tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.614s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1160.181718] env[61852]: INFO nova.scheduler.client.report [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Deleted allocations for instance 645184de-66ce-4b79-a7e3-84e0a0dfe330 [ 1160.692620] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ea6d2893-d7cb-451d-86c7-f3470f44d2ef tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "645184de-66ce-4b79-a7e3-84e0a0dfe330" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.179s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1161.366817] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b3968d53-37a0-4ca3-a380-36c31a7abc6f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "ad917577-5285-4f8d-8096-d83424deba33" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1161.367228] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b3968d53-37a0-4ca3-a380-36c31a7abc6f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "ad917577-5285-4f8d-8096-d83424deba33" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1161.870748] env[61852]: INFO nova.compute.manager [None req-b3968d53-37a0-4ca3-a380-36c31a7abc6f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Detaching volume 450a2b14-09d5-4fed-8158-d8a1c6d4cdd1 [ 1161.900176] env[61852]: INFO nova.virt.block_device [None req-b3968d53-37a0-4ca3-a380-36c31a7abc6f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Attempting to driver detach volume 450a2b14-09d5-4fed-8158-d8a1c6d4cdd1 from mountpoint /dev/sdb [ 1161.900427] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3968d53-37a0-4ca3-a380-36c31a7abc6f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Volume detach. 
Driver type: vmdk {{(pid=61852) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1161.900637] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3968d53-37a0-4ca3-a380-36c31a7abc6f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-277435', 'volume_id': '450a2b14-09d5-4fed-8158-d8a1c6d4cdd1', 'name': 'volume-450a2b14-09d5-4fed-8158-d8a1c6d4cdd1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ad917577-5285-4f8d-8096-d83424deba33', 'attached_at': '', 'detached_at': '', 'volume_id': '450a2b14-09d5-4fed-8158-d8a1c6d4cdd1', 'serial': '450a2b14-09d5-4fed-8158-d8a1c6d4cdd1'} {{(pid=61852) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1161.901539] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-214be279-fc3b-494e-b683-450359ffcee1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.922333] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cda0d6e-2e3a-47ba-a042-1cdc8b29a0bd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.928896] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9636fb5f-8a6d-4579-969b-8ead110bdda8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.948767] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8d69db5-8802-49f6-90b7-5a21a6559c93 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.962707] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3968d53-37a0-4ca3-a380-36c31a7abc6f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] The volume has not been displaced from its original location: [datastore1] volume-450a2b14-09d5-4fed-8158-d8a1c6d4cdd1/volume-450a2b14-09d5-4fed-8158-d8a1c6d4cdd1.vmdk. No consolidation needed. 
{{(pid=61852) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1161.968087] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3968d53-37a0-4ca3-a380-36c31a7abc6f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Reconfiguring VM instance instance-00000067 to detach disk 2001 {{(pid=61852) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1161.968368] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d922dc53-3008-4bb2-aa63-f34af4b6018c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.984714] env[61852]: DEBUG oslo_vmware.api [None req-b3968d53-37a0-4ca3-a380-36c31a7abc6f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1161.984714] env[61852]: value = "task-1293447" [ 1161.984714] env[61852]: _type = "Task" [ 1161.984714] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.993387] env[61852]: DEBUG oslo_vmware.api [None req-b3968d53-37a0-4ca3-a380-36c31a7abc6f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293447, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.494511] env[61852]: DEBUG oslo_vmware.api [None req-b3968d53-37a0-4ca3-a380-36c31a7abc6f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293447, 'name': ReconfigVM_Task, 'duration_secs': 0.217078} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.494925] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3968d53-37a0-4ca3-a380-36c31a7abc6f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Reconfigured VM instance instance-00000067 to detach disk 2001 {{(pid=61852) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1162.499318] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f30171b9-aae5-43c1-8931-db3926eb3d40 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.513957] env[61852]: DEBUG oslo_vmware.api [None req-b3968d53-37a0-4ca3-a380-36c31a7abc6f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1162.513957] env[61852]: value = "task-1293448" [ 1162.513957] env[61852]: _type = "Task" [ 1162.513957] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.521582] env[61852]: DEBUG oslo_vmware.api [None req-b3968d53-37a0-4ca3-a380-36c31a7abc6f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293448, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.023888] env[61852]: DEBUG oslo_vmware.api [None req-b3968d53-37a0-4ca3-a380-36c31a7abc6f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293448, 'name': ReconfigVM_Task, 'duration_secs': 0.129687} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.024210] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3968d53-37a0-4ca3-a380-36c31a7abc6f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-277435', 'volume_id': '450a2b14-09d5-4fed-8158-d8a1c6d4cdd1', 'name': 'volume-450a2b14-09d5-4fed-8158-d8a1c6d4cdd1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ad917577-5285-4f8d-8096-d83424deba33', 'attached_at': '', 'detached_at': '', 'volume_id': '450a2b14-09d5-4fed-8158-d8a1c6d4cdd1', 'serial': '450a2b14-09d5-4fed-8158-d8a1c6d4cdd1'} {{(pid=61852) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1163.563536] env[61852]: DEBUG nova.objects.instance [None req-b3968d53-37a0-4ca3-a380-36c31a7abc6f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lazy-loading 'flavor' on Instance uuid ad917577-5285-4f8d-8096-d83424deba33 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1164.571743] env[61852]: DEBUG oslo_concurrency.lockutils [None req-b3968d53-37a0-4ca3-a380-36c31a7abc6f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "ad917577-5285-4f8d-8096-d83424deba33" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.204s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1165.602111] env[61852]: DEBUG oslo_concurrency.lockutils [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "ad917577-5285-4f8d-8096-d83424deba33" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1165.602442] env[61852]: DEBUG oslo_concurrency.lockutils [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "ad917577-5285-4f8d-8096-d83424deba33" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1165.602608] env[61852]: DEBUG oslo_concurrency.lockutils [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "ad917577-5285-4f8d-8096-d83424deba33-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1165.602799] env[61852]: DEBUG oslo_concurrency.lockutils [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "ad917577-5285-4f8d-8096-d83424deba33-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1165.602976] env[61852]: DEBUG oslo_concurrency.lockutils [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "ad917577-5285-4f8d-8096-d83424deba33-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1165.605347] env[61852]: INFO nova.compute.manager [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Terminating instance [ 1165.607230] env[61852]: DEBUG nova.compute.manager [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1165.607403] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1165.608296] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74a45ce1-f2b8-4b74-b40b-485a231c6e61 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.615791] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1165.616293] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ead1eb22-1526-4d87-9725-5f8cc503984f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.622260] env[61852]: DEBUG oslo_vmware.api [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1165.622260] env[61852]: value = "task-1293449" [ 1165.622260] env[61852]: _type = "Task" [ 1165.622260] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.629445] env[61852]: DEBUG oslo_vmware.api [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293449, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.131984] env[61852]: DEBUG oslo_vmware.api [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293449, 'name': PowerOffVM_Task, 'duration_secs': 0.171528} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.132272] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1166.132438] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1166.132687] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ff894497-2f78-4de2-9a6a-82ffba25da28 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.189991] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1166.190255] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Deleting contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1166.190447] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Deleting the datastore file [datastore2] ad917577-5285-4f8d-8096-d83424deba33 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1166.190727] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8877caa9-43ba-4f52-a4b7-5de99d44de13 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.199023] env[61852]: DEBUG oslo_vmware.api [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1166.199023] env[61852]: value = "task-1293451" [ 
1166.199023] env[61852]: _type = "Task" [ 1166.199023] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.205987] env[61852]: DEBUG oslo_vmware.api [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293451, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.710020] env[61852]: DEBUG oslo_vmware.api [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293451, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131065} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.710361] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1166.710498] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Deleted contents of the VM from datastore datastore2 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1166.710648] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1166.710831] env[61852]: INFO nova.compute.manager [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: ad917577-5285-4f8d-8096-d83424deba33] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1166.711099] env[61852]: DEBUG oslo.service.loopingcall [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1166.711301] env[61852]: DEBUG nova.compute.manager [-] [instance: ad917577-5285-4f8d-8096-d83424deba33] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1166.711396] env[61852]: DEBUG nova.network.neutron [-] [instance: ad917577-5285-4f8d-8096-d83424deba33] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1167.157188] env[61852]: DEBUG nova.compute.manager [req-f2e0815b-0edf-49d3-a1a0-a5272534629f req-4c58937c-fc81-4a41-aae2-f1e01d66f9eb service nova] [instance: ad917577-5285-4f8d-8096-d83424deba33] Received event network-vif-deleted-749e06f0-8fbc-42b0-bbf4-95d75f6733d1 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1167.157532] env[61852]: INFO nova.compute.manager [req-f2e0815b-0edf-49d3-a1a0-a5272534629f req-4c58937c-fc81-4a41-aae2-f1e01d66f9eb service nova] [instance: ad917577-5285-4f8d-8096-d83424deba33] Neutron deleted interface 749e06f0-8fbc-42b0-bbf4-95d75f6733d1; detaching it from the instance and deleting it from the info cache [ 1167.157691] env[61852]: DEBUG nova.network.neutron [req-f2e0815b-0edf-49d3-a1a0-a5272534629f req-4c58937c-fc81-4a41-aae2-f1e01d66f9eb service nova] [instance: ad917577-5285-4f8d-8096-d83424deba33] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.637701] env[61852]: DEBUG nova.network.neutron [-] [instance: ad917577-5285-4f8d-8096-d83424deba33] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.660762] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9bbb969e-9a9c-4f7e-8e3f-3e6ca3587240 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.669920] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b28aad6-d0a8-46a2-9c83-6561966b81ad {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.692823] env[61852]: DEBUG nova.compute.manager [req-f2e0815b-0edf-49d3-a1a0-a5272534629f req-4c58937c-fc81-4a41-aae2-f1e01d66f9eb service nova] [instance: ad917577-5285-4f8d-8096-d83424deba33] Detach interface failed, port_id=749e06f0-8fbc-42b0-bbf4-95d75f6733d1, reason: Instance ad917577-5285-4f8d-8096-d83424deba33 could not be found. {{(pid=61852) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1168.140132] env[61852]: INFO nova.compute.manager [-] [instance: ad917577-5285-4f8d-8096-d83424deba33] Took 1.43 seconds to deallocate network for instance. 
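
The destroy path recorded above (PowerOffVM_Task, then UnregisterVM, then DeleteDatastoreFile_Task, each driven through wait_for_task) reduces to a handful of oslo.vmware session calls. A minimal sketch in Python, assuming a hypothetical vCenter endpoint and credentials; vm_ref, dc_ref and ds_path are placeholder managed-object references and paths, not values taken from this log, and only the public VMwareAPISession, invoke_api and wait_for_task APIs are used:

from oslo_vmware import api

def destroy_instance(session, vm_ref, dc_ref, ds_path):
    # PowerOffVM_Task returns a task moref; wait_for_task() polls its
    # TaskInfo every task_poll_interval seconds, which is what produces
    # the "progress is N%" / "completed successfully" lines above, and
    # raises if the task finishes in an error state.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # UnregisterVM is a plain method, not a task: nothing to poll.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # DeleteDatastoreFile_Task removes the instance directory, e.g. a
    # path like '[datastore2] ad917577-5285-4f8d-8096-d83424deba33'.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path, datacenter=dc_ref)
    session.wait_for_task(task)

if __name__ == '__main__':
    # Hypothetical endpoint and credentials, for illustration only.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)
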
[ 1168.647252] env[61852]: DEBUG oslo_concurrency.lockutils [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1168.647543] env[61852]: DEBUG oslo_concurrency.lockutils [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1168.647793] env[61852]: DEBUG nova.objects.instance [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lazy-loading 'resources' on Instance uuid ad917577-5285-4f8d-8096-d83424deba33 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1169.189964] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b959f50b-0872-40e6-95de-7d2546ef747a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.197243] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0179c05-73b5-4360-86d3-b4348ee3b816 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.227293] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea1f98de-1fd3-4749-b1f7-13b6874a11f0 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.234039] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34001c3b-bf2a-4643-8ddd-7021f2cf4cfa {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.246715] env[61852]: DEBUG nova.compute.provider_tree [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1169.750107] env[61852]: DEBUG nova.scheduler.client.report [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1170.255376] env[61852]: DEBUG oslo_concurrency.lockutils [None req-605867f7-088d-4656-8467-1fa43d8530e2 
tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.608s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1170.275606] env[61852]: INFO nova.scheduler.client.report [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Deleted allocations for instance ad917577-5285-4f8d-8096-d83424deba33 [ 1170.783867] env[61852]: DEBUG oslo_concurrency.lockutils [None req-605867f7-088d-4656-8467-1fa43d8530e2 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "ad917577-5285-4f8d-8096-d83424deba33" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.181s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1172.340995] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "9f86b477-728e-4187-801b-780781fefb15" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1172.341273] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "9f86b477-728e-4187-801b-780781fefb15" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1172.843084] env[61852]: DEBUG nova.compute.manager [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Starting instance...
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1173.368451] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1173.368730] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1173.370294] env[61852]: INFO nova.compute.claims [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1174.413851] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b2c1fd-93e5-4c88-b514-167f26edc69d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.422865] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f7ba1af-af47-43a6-a89d-af07a2f84904 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.451460] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de7b891c-cb45-4964-8eec-9384e81fd4a3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.458196] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ceac71d-bf7a-42b2-b69c-cb7375b4d6aa {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.470663] env[61852]: DEBUG nova.compute.provider_tree [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1174.973365] env[61852]: DEBUG nova.scheduler.client.report [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1175.479354] env[61852]: DEBUG 
oslo_concurrency.lockutils [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.110s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1175.479881] env[61852]: DEBUG nova.compute.manager [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1175.984739] env[61852]: DEBUG nova.compute.utils [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1175.986759] env[61852]: DEBUG nova.compute.manager [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Allocating IP information in the background. {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1175.986759] env[61852]: DEBUG nova.network.neutron [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1176.033289] env[61852]: DEBUG nova.policy [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '81c41a76b275406c83c80068659e2b04', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3b019fd876c14428bd8f2de5fa66da4d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 1176.281848] env[61852]: DEBUG nova.network.neutron [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Successfully created port: b7508435-bcf5-4b82-b506-dd5017524b27 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1176.489587] env[61852]: DEBUG nova.compute.manager [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1177.501874] env[61852]: DEBUG nova.compute.manager [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1177.527617] env[61852]: DEBUG nova.virt.hardware [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=<?>,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-15T17:18:24Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1177.527877] env[61852]: DEBUG nova.virt.hardware [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1177.528054] env[61852]: DEBUG nova.virt.hardware [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1177.528252] env[61852]: DEBUG nova.virt.hardware [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1177.528403] env[61852]: DEBUG nova.virt.hardware [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1177.528571] env[61852]: DEBUG nova.virt.hardware [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1177.528780] env[61852]: DEBUG nova.virt.hardware [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1177.528945] env[61852]: DEBUG nova.virt.hardware [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1177.529126] env[61852]: DEBUG nova.virt.hardware [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1177.529295] env[61852]: DEBUG nova.virt.hardware [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1177.529469] env[61852]: DEBUG nova.virt.hardware [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1177.530388] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a366bb76-a767-4959-9d6a-a53f9b3a5e7a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.538120] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68006443-d591-4373-8f5d-11657e8cef17 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.641770] env[61852]: DEBUG nova.compute.manager [req-3649a9c4-03c0-4525-afb1-e5ed0aba542a req-5ec349e9-a2ef-4153-8926-914a8285103b service nova] [instance: 9f86b477-728e-4187-801b-780781fefb15] Received event network-vif-plugged-b7508435-bcf5-4b82-b506-dd5017524b27 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1177.642014] env[61852]: DEBUG oslo_concurrency.lockutils [req-3649a9c4-03c0-4525-afb1-e5ed0aba542a req-5ec349e9-a2ef-4153-8926-914a8285103b service nova] Acquiring lock "9f86b477-728e-4187-801b-780781fefb15-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1177.642243] env[61852]: DEBUG oslo_concurrency.lockutils [req-3649a9c4-03c0-4525-afb1-e5ed0aba542a req-5ec349e9-a2ef-4153-8926-914a8285103b service nova] Lock "9f86b477-728e-4187-801b-780781fefb15-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1177.642414] env[61852]: DEBUG oslo_concurrency.lockutils [req-3649a9c4-03c0-4525-afb1-e5ed0aba542a req-5ec349e9-a2ef-4153-8926-914a8285103b service nova] Lock "9f86b477-728e-4187-801b-780781fefb15-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1177.642585]
env[61852]: DEBUG nova.compute.manager [req-3649a9c4-03c0-4525-afb1-e5ed0aba542a req-5ec349e9-a2ef-4153-8926-914a8285103b service nova] [instance: 9f86b477-728e-4187-801b-780781fefb15] No waiting events found dispatching network-vif-plugged-b7508435-bcf5-4b82-b506-dd5017524b27 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1177.642752] env[61852]: WARNING nova.compute.manager [req-3649a9c4-03c0-4525-afb1-e5ed0aba542a req-5ec349e9-a2ef-4153-8926-914a8285103b service nova] [instance: 9f86b477-728e-4187-801b-780781fefb15] Received unexpected event network-vif-plugged-b7508435-bcf5-4b82-b506-dd5017524b27 for instance with vm_state building and task_state spawning. [ 1177.723937] env[61852]: DEBUG nova.network.neutron [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Successfully updated port: b7508435-bcf5-4b82-b506-dd5017524b27 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1178.225966] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "refresh_cache-9f86b477-728e-4187-801b-780781fefb15" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1178.226144] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquired lock "refresh_cache-9f86b477-728e-4187-801b-780781fefb15" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1178.226303] env[61852]: DEBUG nova.network.neutron [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1178.758835] env[61852]: DEBUG nova.network.neutron [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1178.873494] env[61852]: DEBUG nova.network.neutron [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Updating instance_info_cache with network_info: [{"id": "b7508435-bcf5-4b82-b506-dd5017524b27", "address": "fa:16:3e:5e:8a:60", "network": {"id": "84f117d3-1eaf-4f99-9240-7342ce499c83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1473985775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b019fd876c14428bd8f2de5fa66da4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7508435-bc", "ovs_interfaceid": "b7508435-bcf5-4b82-b506-dd5017524b27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1179.376517] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Releasing lock "refresh_cache-9f86b477-728e-4187-801b-780781fefb15" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1179.376831] env[61852]: DEBUG nova.compute.manager [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Instance network_info: |[{"id": "b7508435-bcf5-4b82-b506-dd5017524b27", "address": "fa:16:3e:5e:8a:60", "network": {"id": "84f117d3-1eaf-4f99-9240-7342ce499c83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1473985775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b019fd876c14428bd8f2de5fa66da4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7508435-bc", "ovs_interfaceid": "b7508435-bcf5-4b82-b506-dd5017524b27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 1179.377288] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5e:8a:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e23c1d18-c841-49ea-95f3-df5ceac28afd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b7508435-bcf5-4b82-b506-dd5017524b27', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1179.384874] env[61852]: DEBUG oslo.service.loopingcall [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1179.385093] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f86b477-728e-4187-801b-780781fefb15] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1179.385327] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f413f0b9-819e-4f76-af62-708855f34f2f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.406146] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1179.406146] env[61852]: value = "task-1293452" [ 1179.406146] env[61852]: _type = "Task" [ 1179.406146] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.413653] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293452, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.667073] env[61852]: DEBUG nova.compute.manager [req-9bf8ea78-0672-4851-8b36-865c41354903 req-b91ae981-59bb-46a5-932e-11d68c2cc7ae service nova] [instance: 9f86b477-728e-4187-801b-780781fefb15] Received event network-changed-b7508435-bcf5-4b82-b506-dd5017524b27 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1179.667279] env[61852]: DEBUG nova.compute.manager [req-9bf8ea78-0672-4851-8b36-865c41354903 req-b91ae981-59bb-46a5-932e-11d68c2cc7ae service nova] [instance: 9f86b477-728e-4187-801b-780781fefb15] Refreshing instance network info cache due to event network-changed-b7508435-bcf5-4b82-b506-dd5017524b27. 
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1179.667508] env[61852]: DEBUG oslo_concurrency.lockutils [req-9bf8ea78-0672-4851-8b36-865c41354903 req-b91ae981-59bb-46a5-932e-11d68c2cc7ae service nova] Acquiring lock "refresh_cache-9f86b477-728e-4187-801b-780781fefb15" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1179.667685] env[61852]: DEBUG oslo_concurrency.lockutils [req-9bf8ea78-0672-4851-8b36-865c41354903 req-b91ae981-59bb-46a5-932e-11d68c2cc7ae service nova] Acquired lock "refresh_cache-9f86b477-728e-4187-801b-780781fefb15" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1179.667863] env[61852]: DEBUG nova.network.neutron [req-9bf8ea78-0672-4851-8b36-865c41354903 req-b91ae981-59bb-46a5-932e-11d68c2cc7ae service nova] [instance: 9f86b477-728e-4187-801b-780781fefb15] Refreshing network info cache for port b7508435-bcf5-4b82-b506-dd5017524b27 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1179.916561] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293452, 'name': CreateVM_Task, 'duration_secs': 0.293816} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.916908] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f86b477-728e-4187-801b-780781fefb15] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1179.917471] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1179.917684] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1179.918030] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1179.918291] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8cdba4e5-7945-4eed-966a-c72a12c71eb6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.922553] env[61852]: DEBUG oslo_vmware.api [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1179.922553] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52680465-c52b-8c36-9927-295d5f60f1d6" [ 1179.922553] env[61852]: _type = "Task" [ 1179.922553] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.929979] env[61852]: DEBUG oslo_vmware.api [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52680465-c52b-8c36-9927-295d5f60f1d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.351555] env[61852]: DEBUG nova.network.neutron [req-9bf8ea78-0672-4851-8b36-865c41354903 req-b91ae981-59bb-46a5-932e-11d68c2cc7ae service nova] [instance: 9f86b477-728e-4187-801b-780781fefb15] Updated VIF entry in instance network info cache for port b7508435-bcf5-4b82-b506-dd5017524b27. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1180.351949] env[61852]: DEBUG nova.network.neutron [req-9bf8ea78-0672-4851-8b36-865c41354903 req-b91ae981-59bb-46a5-932e-11d68c2cc7ae service nova] [instance: 9f86b477-728e-4187-801b-780781fefb15] Updating instance_info_cache with network_info: [{"id": "b7508435-bcf5-4b82-b506-dd5017524b27", "address": "fa:16:3e:5e:8a:60", "network": {"id": "84f117d3-1eaf-4f99-9240-7342ce499c83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1473985775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b019fd876c14428bd8f2de5fa66da4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7508435-bc", "ovs_interfaceid": "b7508435-bcf5-4b82-b506-dd5017524b27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1180.433848] env[61852]: DEBUG oslo_vmware.api [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52680465-c52b-8c36-9927-295d5f60f1d6, 'name': SearchDatastore_Task, 'duration_secs': 0.010158} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.434157] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1180.434387] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1180.434618] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1180.434767] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1180.435034] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1180.435221] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6686f9f3-9b2d-4496-a0dc-2be8d8e4fbec {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.442919] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1180.443112] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1180.443772] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69c0b40c-74f7-42e1-ae45-862d89e1038f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.448350] env[61852]: DEBUG oslo_vmware.api [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1180.448350] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52adeae4-0279-2cd3-8d31-389f627c8fda" [ 1180.448350] env[61852]: _type = "Task" [ 1180.448350] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.455085] env[61852]: DEBUG oslo_vmware.api [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52adeae4-0279-2cd3-8d31-389f627c8fda, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.855179] env[61852]: DEBUG oslo_concurrency.lockutils [req-9bf8ea78-0672-4851-8b36-865c41354903 req-b91ae981-59bb-46a5-932e-11d68c2cc7ae service nova] Releasing lock "refresh_cache-9f86b477-728e-4187-801b-780781fefb15" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1180.958378] env[61852]: DEBUG oslo_vmware.api [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52adeae4-0279-2cd3-8d31-389f627c8fda, 'name': SearchDatastore_Task, 'duration_secs': 0.007681} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.959719] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f155b097-4823-4dde-b956-470fb2f9fbe3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.964257] env[61852]: DEBUG oslo_vmware.api [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1180.964257] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]522f7996-65d8-13ba-70eb-bbfb46576bc1" [ 1180.964257] env[61852]: _type = "Task" [ 1180.964257] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.971369] env[61852]: DEBUG oslo_vmware.api [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]522f7996-65d8-13ba-70eb-bbfb46576bc1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.473995] env[61852]: DEBUG oslo_vmware.api [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]522f7996-65d8-13ba-70eb-bbfb46576bc1, 'name': SearchDatastore_Task, 'duration_secs': 0.008789} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.474284] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1181.474546] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 9f86b477-728e-4187-801b-780781fefb15/9f86b477-728e-4187-801b-780781fefb15.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1181.474820] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d27e3f01-63a5-41f9-88de-261f73ceb821 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.481406] env[61852]: DEBUG oslo_vmware.api [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1181.481406] env[61852]: value = "task-1293453" [ 1181.481406] env[61852]: _type = "Task" [ 1181.481406] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.488275] env[61852]: DEBUG oslo_vmware.api [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293453, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.991680] env[61852]: DEBUG oslo_vmware.api [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293453, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.426983} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.992089] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 9f86b477-728e-4187-801b-780781fefb15/9f86b477-728e-4187-801b-780781fefb15.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1181.992171] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1181.992404] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-183c82af-3967-4429-a852-849ac45ac152 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.999948] env[61852]: DEBUG oslo_vmware.api [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1181.999948] env[61852]: value = "task-1293454" [ 1181.999948] env[61852]: _type = "Task" [ 1181.999948] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.006952] env[61852]: DEBUG oslo_vmware.api [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293454, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.510215] env[61852]: DEBUG oslo_vmware.api [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293454, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059935} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.510556] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1182.511331] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f4c027-fc46-44da-84e6-eba54f16df3c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.532418] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] 9f86b477-728e-4187-801b-780781fefb15/9f86b477-728e-4187-801b-780781fefb15.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1182.532679] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a212c9d5-3d8f-4a84-8752-683c3ff62755 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.551536] env[61852]: DEBUG oslo_vmware.api [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1182.551536] env[61852]: value = "task-1293455" [ 1182.551536] env[61852]: _type = "Task" [ 1182.551536] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.558880] env[61852]: DEBUG oslo_vmware.api [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293455, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.062160] env[61852]: DEBUG oslo_vmware.api [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293455, 'name': ReconfigVM_Task, 'duration_secs': 0.264956} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.062585] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Reconfigured VM instance instance-0000006a to attach disk [datastore1] 9f86b477-728e-4187-801b-780781fefb15/9f86b477-728e-4187-801b-780781fefb15.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1183.063081] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d003795b-3923-427c-af5b-7f5b389d0e06 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.069507] env[61852]: DEBUG oslo_vmware.api [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1183.069507] env[61852]: value = "task-1293456" [ 1183.069507] env[61852]: _type = "Task" [ 1183.069507] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.077983] env[61852]: DEBUG oslo_vmware.api [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293456, 'name': Rename_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.579587] env[61852]: DEBUG oslo_vmware.api [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293456, 'name': Rename_Task, 'duration_secs': 0.133025} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.579895] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1183.580173] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b5a649e7-7aae-44f9-8960-e1c832a2b244 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.586066] env[61852]: DEBUG oslo_vmware.api [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1183.586066] env[61852]: value = "task-1293457" [ 1183.586066] env[61852]: _type = "Task" [ 1183.586066] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.593298] env[61852]: DEBUG oslo_vmware.api [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293457, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.096593] env[61852]: DEBUG oslo_vmware.api [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293457, 'name': PowerOnVM_Task, 'duration_secs': 0.442709} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.096958] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1184.097085] env[61852]: INFO nova.compute.manager [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Took 6.59 seconds to spawn the instance on the hypervisor. [ 1184.097243] env[61852]: DEBUG nova.compute.manager [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1184.098015] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ccf0dce-b984-4f12-b06d-283ba139a3ed {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.614726] env[61852]: INFO nova.compute.manager [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Took 11.27 seconds to build instance. [ 1185.116846] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e392c3ee-671a-4c30-9d71-9c756378ae31 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "9f86b477-728e-4187-801b-780781fefb15" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.775s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1185.287914] env[61852]: DEBUG nova.compute.manager [req-b7d53653-6018-43e7-8d20-c68ab90222b9 req-e3f354af-d3b5-4600-ab91-a7032bad0b26 service nova] [instance: 9f86b477-728e-4187-801b-780781fefb15] Received event network-changed-b7508435-bcf5-4b82-b506-dd5017524b27 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1185.288117] env[61852]: DEBUG nova.compute.manager [req-b7d53653-6018-43e7-8d20-c68ab90222b9 req-e3f354af-d3b5-4600-ab91-a7032bad0b26 service nova] [instance: 9f86b477-728e-4187-801b-780781fefb15] Refreshing instance network info cache due to event network-changed-b7508435-bcf5-4b82-b506-dd5017524b27. 
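[editor's note] The network-changed event above triggers a port-cache refresh that runs under a per-instance named lock, visible in the Acquiring/Acquired/Releasing "refresh_cache-<uuid>" entries just below. A minimal sketch of that oslo.concurrency pattern; the handler body is illustrative, not Nova's actual code:

    from oslo_concurrency import lockutils

    def refresh_instance_cache(instance_uuid, refresh_fn):
        # lockutils.lock() is what emits the Acquiring/Acquired/Releasing
        # DEBUG lines in this log, including the waited/held timings.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            return refresh_fn(instance_uuid)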
{{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1185.288346] env[61852]: DEBUG oslo_concurrency.lockutils [req-b7d53653-6018-43e7-8d20-c68ab90222b9 req-e3f354af-d3b5-4600-ab91-a7032bad0b26 service nova] Acquiring lock "refresh_cache-9f86b477-728e-4187-801b-780781fefb15" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1185.288570] env[61852]: DEBUG oslo_concurrency.lockutils [req-b7d53653-6018-43e7-8d20-c68ab90222b9 req-e3f354af-d3b5-4600-ab91-a7032bad0b26 service nova] Acquired lock "refresh_cache-9f86b477-728e-4187-801b-780781fefb15" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.288680] env[61852]: DEBUG nova.network.neutron [req-b7d53653-6018-43e7-8d20-c68ab90222b9 req-e3f354af-d3b5-4600-ab91-a7032bad0b26 service nova] [instance: 9f86b477-728e-4187-801b-780781fefb15] Refreshing network info cache for port b7508435-bcf5-4b82-b506-dd5017524b27 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1185.299217] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquiring lock "d8baa4c3-7da1-450c-8bef-336fbb34ceab" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1185.299434] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d8baa4c3-7da1-450c-8bef-336fbb34ceab" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1185.299640] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquiring lock "d8baa4c3-7da1-450c-8bef-336fbb34ceab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1185.299883] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d8baa4c3-7da1-450c-8bef-336fbb34ceab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1185.300012] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d8baa4c3-7da1-450c-8bef-336fbb34ceab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1185.302061] env[61852]: INFO nova.compute.manager [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 
tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Terminating instance [ 1185.303736] env[61852]: DEBUG nova.compute.manager [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1185.303936] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1185.304754] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-126b526a-c8ac-4ff7-939d-19bdd4bf5ab1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.312233] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1185.312471] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7f6c517d-3cd5-4200-b5ad-f979eb652599 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.317933] env[61852]: DEBUG oslo_vmware.api [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1185.317933] env[61852]: value = "task-1293458" [ 1185.317933] env[61852]: _type = "Task" [ 1185.317933] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.325167] env[61852]: DEBUG oslo_vmware.api [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293458, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.830596] env[61852]: DEBUG oslo_vmware.api [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293458, 'name': PowerOffVM_Task, 'duration_secs': 0.198082} completed successfully. 
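[editor's note] Terminating d8baa4c3 follows the usual vmwareapi destroy sequence, which the entries here and just below record step by step: power off, unregister the VM, then delete its datastore directory. A hedged sketch of those three calls; session, vm_ref, and dc_ref are assumed from earlier lookups, and only the datastore path is taken from the log:

    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # UnregisterVM is not a task: it drops the VM from inventory but
    # leaves its files on the datastore, hence the explicit delete below.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    file_mgr = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_mgr,
        name='[datastore1] d8baa4c3-7da1-450c-8bef-336fbb34ceab',
        datacenter=dc_ref)
    session.wait_for_task(task)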
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.830746] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1185.830886] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1185.831164] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7793d039-8b37-40a9-91c2-29ce78d96786 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.889464] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1185.889696] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1185.889881] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Deleting the datastore file [datastore1] d8baa4c3-7da1-450c-8bef-336fbb34ceab {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1185.890173] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2a877022-358d-449f-a72d-d2352bf556bf {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.896470] env[61852]: DEBUG oslo_vmware.api [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1185.896470] env[61852]: value = "task-1293460" [ 1185.896470] env[61852]: _type = "Task" [ 1185.896470] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.903461] env[61852]: DEBUG oslo_vmware.api [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293460, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.155130] env[61852]: DEBUG nova.network.neutron [req-b7d53653-6018-43e7-8d20-c68ab90222b9 req-e3f354af-d3b5-4600-ab91-a7032bad0b26 service nova] [instance: 9f86b477-728e-4187-801b-780781fefb15] Updated VIF entry in instance network info cache for port b7508435-bcf5-4b82-b506-dd5017524b27. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1186.155524] env[61852]: DEBUG nova.network.neutron [req-b7d53653-6018-43e7-8d20-c68ab90222b9 req-e3f354af-d3b5-4600-ab91-a7032bad0b26 service nova] [instance: 9f86b477-728e-4187-801b-780781fefb15] Updating instance_info_cache with network_info: [{"id": "b7508435-bcf5-4b82-b506-dd5017524b27", "address": "fa:16:3e:5e:8a:60", "network": {"id": "84f117d3-1eaf-4f99-9240-7342ce499c83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1473985775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b019fd876c14428bd8f2de5fa66da4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7508435-bc", "ovs_interfaceid": "b7508435-bcf5-4b82-b506-dd5017524b27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1186.406789] env[61852]: DEBUG oslo_vmware.api [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293460, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126694} completed successfully. 
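[editor's note] The instance_info_cache entry above is a JSON-serialized network_info list, so its addresses (fixed IP 192.168.128.10, floating IP 10.180.180.241) can be pulled out with plain dict traversal. A small self-contained helper using the field names exactly as they appear in the entry:

    def collect_ips(network_info):
        """Return (fixed, floating) IP address lists from network_info."""
        fixed, floating = [], []
        for vif in network_info:
            for subnet in vif['network']['subnets']:
                for ip in subnet['ips']:
                    fixed.append(ip['address'])
                    floating.extend(f['address']
                                    for f in ip.get('floating_ips', []))
        return fixed, floating

    # For the cache entry above this yields
    # (['192.168.128.10'], ['10.180.180.241']).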
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.407069] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1186.407267] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1186.407450] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1186.407626] env[61852]: INFO nova.compute.manager [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1186.407917] env[61852]: DEBUG oslo.service.loopingcall [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1186.408136] env[61852]: DEBUG nova.compute.manager [-] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1186.408233] env[61852]: DEBUG nova.network.neutron [-] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1186.658529] env[61852]: DEBUG oslo_concurrency.lockutils [req-b7d53653-6018-43e7-8d20-c68ab90222b9 req-e3f354af-d3b5-4600-ab91-a7032bad0b26 service nova] Releasing lock "refresh_cache-9f86b477-728e-4187-801b-780781fefb15" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1187.313016] env[61852]: DEBUG nova.compute.manager [req-9b66842f-cb8a-4d98-9061-547f51caf2f8 req-62e2b6be-41dd-4cba-84f5-78b10399cc68 service nova] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Received event network-vif-deleted-537951d4-2e0a-45fd-a9eb-39ddf930b39d {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1187.313277] env[61852]: INFO nova.compute.manager [req-9b66842f-cb8a-4d98-9061-547f51caf2f8 req-62e2b6be-41dd-4cba-84f5-78b10399cc68 service nova] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Neutron deleted interface 537951d4-2e0a-45fd-a9eb-39ddf930b39d; detaching it from the instance and deleting it from the info cache [ 1187.313405] env[61852]: DEBUG nova.network.neutron [req-9b66842f-cb8a-4d98-9061-547f51caf2f8 req-62e2b6be-41dd-4cba-84f5-78b10399cc68 service nova] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Updating instance_info_cache 
with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1187.319635] env[61852]: DEBUG nova.network.neutron [-] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1187.815942] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a5d3b61a-6d97-4593-9646-323ff762667d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.821707] env[61852]: INFO nova.compute.manager [-] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Took 1.41 seconds to deallocate network for instance. [ 1187.827008] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24ed3ddd-89f0-43fa-b99d-37b265322bbf {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.851495] env[61852]: DEBUG nova.compute.manager [req-9b66842f-cb8a-4d98-9061-547f51caf2f8 req-62e2b6be-41dd-4cba-84f5-78b10399cc68 service nova] [instance: d8baa4c3-7da1-450c-8bef-336fbb34ceab] Detach interface failed, port_id=537951d4-2e0a-45fd-a9eb-39ddf930b39d, reason: Instance d8baa4c3-7da1-450c-8bef-336fbb34ceab could not be found. {{(pid=61852) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1188.341766] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1188.342078] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1188.342294] env[61852]: DEBUG nova.objects.instance [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lazy-loading 'resources' on Instance uuid d8baa4c3-7da1-450c-8bef-336fbb34ceab {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1188.887569] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-494941f8-32c2-4d2d-8174-d9a7967f8be6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.895263] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2045e58b-57d1-436b-adff-8e7277d86a72 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.926444] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6587bcf-aeb0-459c-b1bd-2ffcf24f238c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.934265] env[61852]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfe29938-1433-4bbd-b36a-1399db2cdcd7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.948878] env[61852]: DEBUG nova.compute.provider_tree [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1189.451995] env[61852]: DEBUG nova.scheduler.client.report [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1189.957589] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.615s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1189.981014] env[61852]: INFO nova.scheduler.client.report [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Deleted allocations for instance d8baa4c3-7da1-450c-8bef-336fbb34ceab [ 1190.488224] env[61852]: DEBUG oslo_concurrency.lockutils [None req-d726a086-51ea-474d-af99-8ae726d0aa4f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d8baa4c3-7da1-450c-8bef-336fbb34ceab" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.188s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.163867] env[61852]: DEBUG oslo_concurrency.lockutils [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquiring lock "d0d109ac-f203-4b68-b973-32d868d8270f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1192.164168] env[61852]: DEBUG oslo_concurrency.lockutils [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d0d109ac-f203-4b68-b973-32d868d8270f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1192.666417] env[61852]: DEBUG nova.compute.manager [None 
req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Starting instance... {{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1193.184718] env[61852]: DEBUG oslo_concurrency.lockutils [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1193.184978] env[61852]: DEBUG oslo_concurrency.lockutils [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1193.186422] env[61852]: INFO nova.compute.claims [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1194.230213] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc1388c1-cf0a-4666-b5ad-b11325890a36 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.237393] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7cec927-ae99-474d-affd-d68af7f24379 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.265853] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a00a8de-09b9-4018-96bc-04e3baa57ed7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.272226] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00505599-b63b-4d02-b465-2489f9363365 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.284438] env[61852]: DEBUG nova.compute.provider_tree [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1194.787232] env[61852]: DEBUG nova.scheduler.client.report [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1195.156858] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1195.157174] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61852) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1195.292339] env[61852]: DEBUG oslo_concurrency.lockutils [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.107s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1195.292818] env[61852]: DEBUG nova.compute.manager [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1195.798159] env[61852]: DEBUG nova.compute.utils [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1195.799873] env[61852]: DEBUG nova.compute.manager [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Allocating IP information in the background. 
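[editor's note] The compute_resources acquire/release pairs in these entries (instance_claim held the lock 2.107s) come from serializing every resource-tracker update behind one named lock. A minimal sketch of the decorator form of that pattern; the class and method body are illustrative:

    from oslo_concurrency import lockutils

    COMPUTE_RESOURCE_SEMAPHORE = 'compute_resources'

    class MiniResourceTracker:
        @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
        def instance_claim(self, instance, vcpus=1, memory_mb=192):
            # Claims, usage updates and cache cleaning all funnel through
            # this one lock, which is why the log interleaves waited/held
            # timings from different callers.
            ...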
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1195.800057] env[61852]: DEBUG nova.network.neutron [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1195.846071] env[61852]: DEBUG nova.policy [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '76b12b5f5eb843418b31ed30f6f5520c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a10be4b0f16c432c87b39b211fbf2fee', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 1196.139559] env[61852]: DEBUG nova.network.neutron [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Successfully created port: 2f4cf9a5-1bbd-483d-b171-f31ccc69f1a9 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1196.152532] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1196.157062] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1196.157251] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1196.157406] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1196.303031] env[61852]: DEBUG nova.compute.manager [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Start building block device mappings for instance. 
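[editor's note] The "Running periodic task ComputeManager._…" entries above and below come from oslo.service's periodic-task machinery, which logs each task name before invoking it. A minimal runnable sketch of that mechanism, with an illustrative task name and spacing:

    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF

    class MiniManager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=60)
        def _poll_unconfirmed_resizes(self, context):
            pass  # each tick logs "Running periodic task <name>" first

    MiniManager().run_periodic_tasks(None)  # context=None only for the sketch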
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1197.157406] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1197.312140] env[61852]: DEBUG nova.compute.manager [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1197.336300] env[61852]: DEBUG nova.virt.hardware [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1197.336549] env[61852]: DEBUG nova.virt.hardware [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1197.336712] env[61852]: DEBUG nova.virt.hardware [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1197.336897] env[61852]: DEBUG nova.virt.hardware [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1197.337059] env[61852]: DEBUG nova.virt.hardware [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1197.337217] env[61852]: DEBUG nova.virt.hardware [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1197.337424] env[61852]: DEBUG nova.virt.hardware [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 
tempest-AttachVolumeTestJSON-391606047-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1197.337589] env[61852]: DEBUG nova.virt.hardware [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1197.337762] env[61852]: DEBUG nova.virt.hardware [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1197.337951] env[61852]: DEBUG nova.virt.hardware [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1197.338147] env[61852]: DEBUG nova.virt.hardware [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1197.339011] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2603302-8b39-43a1-a847-56ccca7f8b81 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.346576] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5f92d39-e509-4e21-861b-44a2e83ac3b8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.498328] env[61852]: DEBUG nova.compute.manager [req-c467aa83-1e10-4eae-bc96-2e30ca3cf148 req-94f71e7c-baad-43e5-8c60-6f938b1babb8 service nova] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Received event network-vif-plugged-2f4cf9a5-1bbd-483d-b171-f31ccc69f1a9 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1197.498557] env[61852]: DEBUG oslo_concurrency.lockutils [req-c467aa83-1e10-4eae-bc96-2e30ca3cf148 req-94f71e7c-baad-43e5-8c60-6f938b1babb8 service nova] Acquiring lock "d0d109ac-f203-4b68-b973-32d868d8270f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1197.498785] env[61852]: DEBUG oslo_concurrency.lockutils [req-c467aa83-1e10-4eae-bc96-2e30ca3cf148 req-94f71e7c-baad-43e5-8c60-6f938b1babb8 service nova] Lock "d0d109ac-f203-4b68-b973-32d868d8270f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1197.498966] env[61852]: DEBUG oslo_concurrency.lockutils [req-c467aa83-1e10-4eae-bc96-2e30ca3cf148 req-94f71e7c-baad-43e5-8c60-6f938b1babb8 service nova] Lock "d0d109ac-f203-4b68-b973-32d868d8270f-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1197.499161] env[61852]: DEBUG nova.compute.manager [req-c467aa83-1e10-4eae-bc96-2e30ca3cf148 req-94f71e7c-baad-43e5-8c60-6f938b1babb8 service nova] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] No waiting events found dispatching network-vif-plugged-2f4cf9a5-1bbd-483d-b171-f31ccc69f1a9 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1197.499314] env[61852]: WARNING nova.compute.manager [req-c467aa83-1e10-4eae-bc96-2e30ca3cf148 req-94f71e7c-baad-43e5-8c60-6f938b1babb8 service nova] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Received unexpected event network-vif-plugged-2f4cf9a5-1bbd-483d-b171-f31ccc69f1a9 for instance with vm_state building and task_state spawning. [ 1197.583651] env[61852]: DEBUG nova.network.neutron [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Successfully updated port: 2f4cf9a5-1bbd-483d-b171-f31ccc69f1a9 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1197.660517] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1197.660751] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1197.660927] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1197.661096] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61852) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1197.661972] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ed71a2a-82c6-4201-ae0a-8a7ee89507c6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.669992] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01d608d3-8e43-4002-89e0-a6a7a967f9c5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.683023] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e92419fb-f7c4-4c5b-8169-1524c5af9dec {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.688873] env[61852]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1086238-7a3a-4007-806c-0c6b874b47a9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.717513] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181113MB free_disk=139GB free_vcpus=48 pci_devices=None {{(pid=61852) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1197.717651] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1197.717866] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1198.084986] env[61852]: DEBUG oslo_concurrency.lockutils [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquiring lock "refresh_cache-d0d109ac-f203-4b68-b973-32d868d8270f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1198.085184] env[61852]: DEBUG oslo_concurrency.lockutils [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquired lock "refresh_cache-d0d109ac-f203-4b68-b973-32d868d8270f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1198.085340] env[61852]: DEBUG nova.network.neutron [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1198.619875] env[61852]: DEBUG nova.network.neutron [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1198.738686] env[61852]: DEBUG nova.network.neutron [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Updating instance_info_cache with network_info: [{"id": "2f4cf9a5-1bbd-483d-b171-f31ccc69f1a9", "address": "fa:16:3e:59:80:45", "network": {"id": "07c444d7-03d4-406b-bb66-de44a92b43d6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-818710190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a10be4b0f16c432c87b39b211fbf2fee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0c293d47-74c0-49d7-a474-cdb643080f6f", "external-id": "nsx-vlan-transportzone-172", "segmentation_id": 172, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f4cf9a5-1b", "ovs_interfaceid": "2f4cf9a5-1bbd-483d-b171-f31ccc69f1a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1198.742808] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 9f86b477-728e-4187-801b-780781fefb15 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1198.742978] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance d0d109ac-f203-4b68-b973-32d868d8270f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
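[editor's note] The "Final resource view" that follows these two placement allocations is straightforward arithmetic: the 512 MB reserved in the inventory plus two m1.nano allocations of 1 GB disk / 192 MB RAM / 1 vCPU each. A quick check that reproduces the logged used_ram=896MB, used_disk=2GB, used_vcpus=2:

    allocations = [
        {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},  # 9f86b477-...
        {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},  # d0d109ac-...
    ]
    reserved_mb = 512  # MEMORY_MB 'reserved' from the inventory entries

    used_ram = reserved_mb + sum(a['MEMORY_MB'] for a in allocations)  # 896
    used_disk = sum(a['DISK_GB'] for a in allocations)                 # 2
    used_vcpus = sum(a['VCPU'] for a in allocations)                   # 2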
{{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1198.743147] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=61852) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1198.743288] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=61852) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1198.777689] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60527d63-40bb-44f3-ae6a-36b298387eb2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.785199] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d200ec3c-72a1-4263-95f6-3521c2bb6fe6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.813580] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a286232-ab7f-45ab-91ab-1d3fd0215d18 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.820170] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e07a0407-12b3-4f73-bc2f-04ecd996928e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.833492] env[61852]: DEBUG nova.compute.provider_tree [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1199.242184] env[61852]: DEBUG oslo_concurrency.lockutils [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Releasing lock "refresh_cache-d0d109ac-f203-4b68-b973-32d868d8270f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1199.242475] env[61852]: DEBUG nova.compute.manager [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Instance network_info: |[{"id": "2f4cf9a5-1bbd-483d-b171-f31ccc69f1a9", "address": "fa:16:3e:59:80:45", "network": {"id": "07c444d7-03d4-406b-bb66-de44a92b43d6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-818710190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a10be4b0f16c432c87b39b211fbf2fee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "0c293d47-74c0-49d7-a474-cdb643080f6f", "external-id": "nsx-vlan-transportzone-172", "segmentation_id": 172, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f4cf9a5-1b", "ovs_interfaceid": "2f4cf9a5-1bbd-483d-b171-f31ccc69f1a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1199.242926] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:80:45', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0c293d47-74c0-49d7-a474-cdb643080f6f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2f4cf9a5-1bbd-483d-b171-f31ccc69f1a9', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1199.250211] env[61852]: DEBUG oslo.service.loopingcall [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1199.250422] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1199.250646] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f5627d82-0ee5-42fc-8035-cdbc2d006eca {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.270931] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1199.270931] env[61852]: value = "task-1293461" [ 1199.270931] env[61852]: _type = "Task" [ 1199.270931] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.278334] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293461, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.336566] env[61852]: DEBUG nova.scheduler.client.report [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1199.526082] env[61852]: DEBUG nova.compute.manager [req-7d608c8f-7e3a-4267-8375-e3c449c63e7e req-ed98a308-bd8e-4832-839e-cda6c0992fa1 service nova] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Received event network-changed-2f4cf9a5-1bbd-483d-b171-f31ccc69f1a9 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1199.526082] env[61852]: DEBUG nova.compute.manager [req-7d608c8f-7e3a-4267-8375-e3c449c63e7e req-ed98a308-bd8e-4832-839e-cda6c0992fa1 service nova] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Refreshing instance network info cache due to event network-changed-2f4cf9a5-1bbd-483d-b171-f31ccc69f1a9. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1199.526291] env[61852]: DEBUG oslo_concurrency.lockutils [req-7d608c8f-7e3a-4267-8375-e3c449c63e7e req-ed98a308-bd8e-4832-839e-cda6c0992fa1 service nova] Acquiring lock "refresh_cache-d0d109ac-f203-4b68-b973-32d868d8270f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1199.526447] env[61852]: DEBUG oslo_concurrency.lockutils [req-7d608c8f-7e3a-4267-8375-e3c449c63e7e req-ed98a308-bd8e-4832-839e-cda6c0992fa1 service nova] Acquired lock "refresh_cache-d0d109ac-f203-4b68-b973-32d868d8270f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1199.526610] env[61852]: DEBUG nova.network.neutron [req-7d608c8f-7e3a-4267-8375-e3c449c63e7e req-ed98a308-bd8e-4832-839e-cda6c0992fa1 service nova] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Refreshing network info cache for port 2f4cf9a5-1bbd-483d-b171-f31ccc69f1a9 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1199.781285] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293461, 'name': CreateVM_Task, 'duration_secs': 0.299128} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.781714] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1199.782125] env[61852]: DEBUG oslo_concurrency.lockutils [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1199.782306] env[61852]: DEBUG oslo_concurrency.lockutils [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1199.782668] env[61852]: DEBUG oslo_concurrency.lockutils [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1199.782928] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11015e7e-07c9-4093-809a-929d394363fa {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.787221] env[61852]: DEBUG oslo_vmware.api [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1199.787221] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5235851b-61f6-9aa7-3048-700bfb6d4ecf" [ 1199.787221] env[61852]: _type = "Task" [ 1199.787221] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.794989] env[61852]: DEBUG oslo_vmware.api [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5235851b-61f6-9aa7-3048-700bfb6d4ecf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.840753] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61852) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1199.840923] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.123s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.208225] env[61852]: DEBUG nova.network.neutron [req-7d608c8f-7e3a-4267-8375-e3c449c63e7e req-ed98a308-bd8e-4832-839e-cda6c0992fa1 service nova] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Updated VIF entry in instance network info cache for port 2f4cf9a5-1bbd-483d-b171-f31ccc69f1a9. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1200.208594] env[61852]: DEBUG nova.network.neutron [req-7d608c8f-7e3a-4267-8375-e3c449c63e7e req-ed98a308-bd8e-4832-839e-cda6c0992fa1 service nova] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Updating instance_info_cache with network_info: [{"id": "2f4cf9a5-1bbd-483d-b171-f31ccc69f1a9", "address": "fa:16:3e:59:80:45", "network": {"id": "07c444d7-03d4-406b-bb66-de44a92b43d6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-818710190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a10be4b0f16c432c87b39b211fbf2fee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0c293d47-74c0-49d7-a474-cdb643080f6f", "external-id": "nsx-vlan-transportzone-172", "segmentation_id": 172, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f4cf9a5-1b", "ovs_interfaceid": "2f4cf9a5-1bbd-483d-b171-f31ccc69f1a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1200.296740] env[61852]: DEBUG oslo_vmware.api [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]5235851b-61f6-9aa7-3048-700bfb6d4ecf, 'name': SearchDatastore_Task, 'duration_secs': 0.009866} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.297040] env[61852]: DEBUG oslo_concurrency.lockutils [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1200.297286] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1200.297534] env[61852]: DEBUG oslo_concurrency.lockutils [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1200.297664] env[61852]: DEBUG oslo_concurrency.lockutils [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1200.297870] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1200.298136] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a55a6a1e-0dbf-4fcb-998b-8eeec1e46e4a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.305618] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1200.305767] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1200.306438] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1049003b-20af-408b-8e05-1d1a3504ede8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.311061] env[61852]: DEBUG oslo_vmware.api [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1200.311061] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]526cbe79-88bf-ea1a-aac1-dfde64530e2c" [ 1200.311061] env[61852]: _type = "Task" [ 1200.311061] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.317727] env[61852]: DEBUG oslo_vmware.api [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]526cbe79-88bf-ea1a-aac1-dfde64530e2c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.711702] env[61852]: DEBUG oslo_concurrency.lockutils [req-7d608c8f-7e3a-4267-8375-e3c449c63e7e req-ed98a308-bd8e-4832-839e-cda6c0992fa1 service nova] Releasing lock "refresh_cache-d0d109ac-f203-4b68-b973-32d868d8270f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1200.821366] env[61852]: DEBUG oslo_vmware.api [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]526cbe79-88bf-ea1a-aac1-dfde64530e2c, 'name': SearchDatastore_Task, 'duration_secs': 0.007912} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.822111] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e454b56-df09-4660-a205-45d7a41449d7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.826710] env[61852]: DEBUG oslo_vmware.api [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1200.826710] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52255ae6-a58a-fd53-d35a-3de3b12cbe72" [ 1200.826710] env[61852]: _type = "Task" [ 1200.826710] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.833565] env[61852]: DEBUG oslo_vmware.api [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52255ae6-a58a-fd53-d35a-3de3b12cbe72, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.337754] env[61852]: DEBUG oslo_vmware.api [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52255ae6-a58a-fd53-d35a-3de3b12cbe72, 'name': SearchDatastore_Task, 'duration_secs': 0.009047} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.338055] env[61852]: DEBUG oslo_concurrency.lockutils [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1201.338313] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] d0d109ac-f203-4b68-b973-32d868d8270f/d0d109ac-f203-4b68-b973-32d868d8270f.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1201.338567] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-09722d45-0710-44e0-861d-e97da632eb36 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.345170] env[61852]: DEBUG oslo_vmware.api [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1201.345170] env[61852]: value = "task-1293462" [ 1201.345170] env[61852]: _type = "Task" [ 1201.345170] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.352061] env[61852]: DEBUG oslo_vmware.api [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293462, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.841021] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1201.841389] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Starting heal instance info cache {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1201.841389] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Rebuilding the list of instances to heal {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1201.855164] env[61852]: DEBUG oslo_vmware.api [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293462, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.434292} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.855445] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] d0d109ac-f203-4b68-b973-32d868d8270f/d0d109ac-f203-4b68-b973-32d868d8270f.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1201.855660] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1201.855898] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9260e344-d150-4b22-bd54-7cd0760a1212 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.863382] env[61852]: DEBUG oslo_vmware.api [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1201.863382] env[61852]: value = "task-1293463" [ 1201.863382] env[61852]: _type = "Task" [ 1201.863382] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.873266] env[61852]: DEBUG oslo_vmware.api [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293463, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.344916] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Skipping network cache update for instance because it is Building. 
{{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1202.370692] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "refresh_cache-9f86b477-728e-4187-801b-780781fefb15" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1202.370842] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquired lock "refresh_cache-9f86b477-728e-4187-801b-780781fefb15" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1202.370986] env[61852]: DEBUG nova.network.neutron [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 9f86b477-728e-4187-801b-780781fefb15] Forcefully refreshing network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1202.371153] env[61852]: DEBUG nova.objects.instance [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lazy-loading 'info_cache' on Instance uuid 9f86b477-728e-4187-801b-780781fefb15 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1202.375374] env[61852]: DEBUG oslo_vmware.api [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293463, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061752} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.375810] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1202.376567] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9a7734f-8608-4301-8926-2e5212a5133d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.398147] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] d0d109ac-f203-4b68-b973-32d868d8270f/d0d109ac-f203-4b68-b973-32d868d8270f.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1202.398921] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1dd1b6fe-cf7b-4bc4-955a-9f2a004b91cb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.418183] env[61852]: DEBUG oslo_vmware.api [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1202.418183] env[61852]: value = "task-1293464" [ 1202.418183] env[61852]: _type = "Task" [ 1202.418183] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.426012] env[61852]: DEBUG oslo_vmware.api [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293464, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.928222] env[61852]: DEBUG oslo_vmware.api [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293464, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.428216] env[61852]: DEBUG oslo_vmware.api [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293464, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.928597] env[61852]: DEBUG oslo_vmware.api [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293464, 'name': ReconfigVM_Task, 'duration_secs': 1.258889} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.929043] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Reconfigured VM instance instance-0000006b to attach disk [datastore1] d0d109ac-f203-4b68-b973-32d868d8270f/d0d109ac-f203-4b68-b973-32d868d8270f.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1203.930045] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9268e93e-263b-4ca1-ab53-e312a97d4a56 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.936907] env[61852]: DEBUG oslo_vmware.api [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1203.936907] env[61852]: value = "task-1293465" [ 1203.936907] env[61852]: _type = "Task" [ 1203.936907] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.946188] env[61852]: DEBUG oslo_vmware.api [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293465, 'name': Rename_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.090397] env[61852]: DEBUG nova.network.neutron [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 9f86b477-728e-4187-801b-780781fefb15] Updating instance_info_cache with network_info: [{"id": "b7508435-bcf5-4b82-b506-dd5017524b27", "address": "fa:16:3e:5e:8a:60", "network": {"id": "84f117d3-1eaf-4f99-9240-7342ce499c83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1473985775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b019fd876c14428bd8f2de5fa66da4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7508435-bc", "ovs_interfaceid": "b7508435-bcf5-4b82-b506-dd5017524b27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1204.446568] env[61852]: DEBUG oslo_vmware.api [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293465, 'name': Rename_Task, 'duration_secs': 0.137462} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.446846] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1204.447101] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb22d092-6a00-4734-b40f-9973cb0e76a9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.453155] env[61852]: DEBUG oslo_vmware.api [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1204.453155] env[61852]: value = "task-1293466" [ 1204.453155] env[61852]: _type = "Task" [ 1204.453155] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.460187] env[61852]: DEBUG oslo_vmware.api [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293466, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.593062] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Releasing lock "refresh_cache-9f86b477-728e-4187-801b-780781fefb15" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1204.593367] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: 9f86b477-728e-4187-801b-780781fefb15] Updated the network info_cache for instance {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1204.593594] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1204.593761] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1204.962875] env[61852]: DEBUG oslo_vmware.api [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293466, 'name': PowerOnVM_Task, 'duration_secs': 0.413871} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.963261] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1204.963386] env[61852]: INFO nova.compute.manager [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Took 7.65 seconds to spawn the instance on the hypervisor. [ 1204.963537] env[61852]: DEBUG nova.compute.manager [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1204.964294] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5fd33ac-69d9-49da-a7c1-1ccf5c38d82b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.483884] env[61852]: INFO nova.compute.manager [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Took 12.31 seconds to build instance. 
[ 1205.986328] env[61852]: DEBUG oslo_concurrency.lockutils [None req-45536161-5806-43b4-beb0-33c7837d0a62 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d0d109ac-f203-4b68-b973-32d868d8270f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.822s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1206.205190] env[61852]: DEBUG nova.compute.manager [req-dd3a56c1-0e9b-4c0e-82fa-a52c88a7a226 req-e47a5987-9059-4329-8d6f-093ae9c67623 service nova] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Received event network-changed-2f4cf9a5-1bbd-483d-b171-f31ccc69f1a9 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1206.205433] env[61852]: DEBUG nova.compute.manager [req-dd3a56c1-0e9b-4c0e-82fa-a52c88a7a226 req-e47a5987-9059-4329-8d6f-093ae9c67623 service nova] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Refreshing instance network info cache due to event network-changed-2f4cf9a5-1bbd-483d-b171-f31ccc69f1a9. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1206.205623] env[61852]: DEBUG oslo_concurrency.lockutils [req-dd3a56c1-0e9b-4c0e-82fa-a52c88a7a226 req-e47a5987-9059-4329-8d6f-093ae9c67623 service nova] Acquiring lock "refresh_cache-d0d109ac-f203-4b68-b973-32d868d8270f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1206.205791] env[61852]: DEBUG oslo_concurrency.lockutils [req-dd3a56c1-0e9b-4c0e-82fa-a52c88a7a226 req-e47a5987-9059-4329-8d6f-093ae9c67623 service nova] Acquired lock "refresh_cache-d0d109ac-f203-4b68-b973-32d868d8270f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1206.206046] env[61852]: DEBUG nova.network.neutron [req-dd3a56c1-0e9b-4c0e-82fa-a52c88a7a226 req-e47a5987-9059-4329-8d6f-093ae9c67623 service nova] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Refreshing network info cache for port 2f4cf9a5-1bbd-483d-b171-f31ccc69f1a9 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1206.912536] env[61852]: DEBUG nova.network.neutron [req-dd3a56c1-0e9b-4c0e-82fa-a52c88a7a226 req-e47a5987-9059-4329-8d6f-093ae9c67623 service nova] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Updated VIF entry in instance network info cache for port 2f4cf9a5-1bbd-483d-b171-f31ccc69f1a9. 
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1206.912900] env[61852]: DEBUG nova.network.neutron [req-dd3a56c1-0e9b-4c0e-82fa-a52c88a7a226 req-e47a5987-9059-4329-8d6f-093ae9c67623 service nova] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Updating instance_info_cache with network_info: [{"id": "2f4cf9a5-1bbd-483d-b171-f31ccc69f1a9", "address": "fa:16:3e:59:80:45", "network": {"id": "07c444d7-03d4-406b-bb66-de44a92b43d6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-818710190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.217", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a10be4b0f16c432c87b39b211fbf2fee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0c293d47-74c0-49d7-a474-cdb643080f6f", "external-id": "nsx-vlan-transportzone-172", "segmentation_id": 172, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f4cf9a5-1b", "ovs_interfaceid": "2f4cf9a5-1bbd-483d-b171-f31ccc69f1a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1207.415897] env[61852]: DEBUG oslo_concurrency.lockutils [req-dd3a56c1-0e9b-4c0e-82fa-a52c88a7a226 req-e47a5987-9059-4329-8d6f-093ae9c67623 service nova] Releasing lock "refresh_cache-d0d109ac-f203-4b68-b973-32d868d8270f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1210.905888] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1222.289185] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aeba6a7a-99f8-4a66-81fa-07c92b2dc52f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "9f86b477-728e-4187-801b-780781fefb15" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1222.289438] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aeba6a7a-99f8-4a66-81fa-07c92b2dc52f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "9f86b477-728e-4187-801b-780781fefb15" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1222.792726] env[61852]: DEBUG nova.compute.utils [None req-aeba6a7a-99f8-4a66-81fa-07c92b2dc52f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}}
[ 1223.296116] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aeba6a7a-99f8-4a66-81fa-07c92b2dc52f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "9f86b477-728e-4187-801b-780781fefb15" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1224.353053] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aeba6a7a-99f8-4a66-81fa-07c92b2dc52f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "9f86b477-728e-4187-801b-780781fefb15" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1224.353521] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aeba6a7a-99f8-4a66-81fa-07c92b2dc52f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "9f86b477-728e-4187-801b-780781fefb15" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1224.353633] env[61852]: INFO nova.compute.manager [None req-aeba6a7a-99f8-4a66-81fa-07c92b2dc52f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Attaching volume 1e36cecf-c26f-4406-ab3a-cf66d3ee1958 to /dev/sdb [ 1224.383720] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efa5ecaf-7b72-4e9e-8a62-3e0750eb7418 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.390931] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af28abed-b24a-45d4-ba43-fbdb9282189b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.404284] env[61852]: DEBUG nova.virt.block_device [None req-aeba6a7a-99f8-4a66-81fa-07c92b2dc52f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Updating existing volume attachment record: cf08d5a5-0f97-4429-bd14-fdf0124f8041 {{(pid=61852) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1228.949898] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-aeba6a7a-99f8-4a66-81fa-07c92b2dc52f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Volume attach.
Driver type: vmdk {{(pid=61852) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1228.950292] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-aeba6a7a-99f8-4a66-81fa-07c92b2dc52f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-277442', 'volume_id': '1e36cecf-c26f-4406-ab3a-cf66d3ee1958', 'name': 'volume-1e36cecf-c26f-4406-ab3a-cf66d3ee1958', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9f86b477-728e-4187-801b-780781fefb15', 'attached_at': '', 'detached_at': '', 'volume_id': '1e36cecf-c26f-4406-ab3a-cf66d3ee1958', 'serial': '1e36cecf-c26f-4406-ab3a-cf66d3ee1958'} {{(pid=61852) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1228.951111] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9aeb9b1-04ee-4b9d-baab-0bee88bed4ab {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.967643] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d839d966-7a72-42be-ab1a-8a9a7075be99 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.991492] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-aeba6a7a-99f8-4a66-81fa-07c92b2dc52f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] volume-1e36cecf-c26f-4406-ab3a-cf66d3ee1958/volume-1e36cecf-c26f-4406-ab3a-cf66d3ee1958.vmdk or device None with type thin {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1228.991756] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-343afa8d-f4da-4b59-a3f1-5d33e15aca78 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.012043] env[61852]: DEBUG oslo_vmware.api [None req-aeba6a7a-99f8-4a66-81fa-07c92b2dc52f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1229.012043] env[61852]: value = "task-1293469" [ 1229.012043] env[61852]: _type = "Task" [ 1229.012043] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.019033] env[61852]: DEBUG oslo_vmware.api [None req-aeba6a7a-99f8-4a66-81fa-07c92b2dc52f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293469, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.521349] env[61852]: DEBUG oslo_vmware.api [None req-aeba6a7a-99f8-4a66-81fa-07c92b2dc52f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293469, 'name': ReconfigVM_Task, 'duration_secs': 0.338481} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.521622] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-aeba6a7a-99f8-4a66-81fa-07c92b2dc52f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Reconfigured VM instance instance-0000006a to attach disk [datastore2] volume-1e36cecf-c26f-4406-ab3a-cf66d3ee1958/volume-1e36cecf-c26f-4406-ab3a-cf66d3ee1958.vmdk or device None with type thin {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1229.526154] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c71a7559-0913-4cda-9064-acb0edd23c26 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.540620] env[61852]: DEBUG oslo_vmware.api [None req-aeba6a7a-99f8-4a66-81fa-07c92b2dc52f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1229.540620] env[61852]: value = "task-1293470" [ 1229.540620] env[61852]: _type = "Task" [ 1229.540620] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.549519] env[61852]: DEBUG oslo_vmware.api [None req-aeba6a7a-99f8-4a66-81fa-07c92b2dc52f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293470, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.049992] env[61852]: DEBUG oslo_vmware.api [None req-aeba6a7a-99f8-4a66-81fa-07c92b2dc52f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293470, 'name': ReconfigVM_Task, 'duration_secs': 0.155565} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.050337] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-aeba6a7a-99f8-4a66-81fa-07c92b2dc52f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-277442', 'volume_id': '1e36cecf-c26f-4406-ab3a-cf66d3ee1958', 'name': 'volume-1e36cecf-c26f-4406-ab3a-cf66d3ee1958', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9f86b477-728e-4187-801b-780781fefb15', 'attached_at': '', 'detached_at': '', 'volume_id': '1e36cecf-c26f-4406-ab3a-cf66d3ee1958', 'serial': '1e36cecf-c26f-4406-ab3a-cf66d3ee1958'} {{(pid=61852) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1231.084581] env[61852]: DEBUG nova.objects.instance [None req-aeba6a7a-99f8-4a66-81fa-07c92b2dc52f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lazy-loading 'flavor' on Instance uuid 9f86b477-728e-4187-801b-780781fefb15 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1231.591118] env[61852]: DEBUG oslo_concurrency.lockutils [None req-aeba6a7a-99f8-4a66-81fa-07c92b2dc52f tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "9f86b477-728e-4187-801b-780781fefb15" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.237s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1231.811393] env[61852]: DEBUG oslo_concurrency.lockutils [None req-bd1ae377-ab01-4c72-af11-1d49c5332f01 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "9f86b477-728e-4187-801b-780781fefb15" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1231.811655] env[61852]: DEBUG oslo_concurrency.lockutils [None req-bd1ae377-ab01-4c72-af11-1d49c5332f01 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "9f86b477-728e-4187-801b-780781fefb15" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1232.314649] env[61852]: INFO nova.compute.manager [None req-bd1ae377-ab01-4c72-af11-1d49c5332f01 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Detaching volume 1e36cecf-c26f-4406-ab3a-cf66d3ee1958 [ 1232.346222] env[61852]: INFO nova.virt.block_device [None req-bd1ae377-ab01-4c72-af11-1d49c5332f01 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Attempting to driver detach volume 1e36cecf-c26f-4406-ab3a-cf66d3ee1958 from mountpoint /dev/sdb [ 1232.346470] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd1ae377-ab01-4c72-af11-1d49c5332f01 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] 
[instance: 9f86b477-728e-4187-801b-780781fefb15] Volume detach. Driver type: vmdk {{(pid=61852) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1232.346658] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd1ae377-ab01-4c72-af11-1d49c5332f01 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-277442', 'volume_id': '1e36cecf-c26f-4406-ab3a-cf66d3ee1958', 'name': 'volume-1e36cecf-c26f-4406-ab3a-cf66d3ee1958', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9f86b477-728e-4187-801b-780781fefb15', 'attached_at': '', 'detached_at': '', 'volume_id': '1e36cecf-c26f-4406-ab3a-cf66d3ee1958', 'serial': '1e36cecf-c26f-4406-ab3a-cf66d3ee1958'} {{(pid=61852) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1232.347532] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f0533dc-a2ec-40b6-a33e-dce48a3ab0ad {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.368123] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbc8f522-cf19-4c41-96c7-e90fd89bfbe6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.374432] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-797435d6-37a7-464b-a768-9edfc4e3635a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.393354] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-627f9c1a-fe99-4bd0-8966-fc09be976c86 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.408155] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd1ae377-ab01-4c72-af11-1d49c5332f01 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] The volume has not been displaced from its original location: [datastore2] volume-1e36cecf-c26f-4406-ab3a-cf66d3ee1958/volume-1e36cecf-c26f-4406-ab3a-cf66d3ee1958.vmdk. No consolidation needed. 
{{(pid=61852) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1232.413181] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd1ae377-ab01-4c72-af11-1d49c5332f01 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Reconfiguring VM instance instance-0000006a to detach disk 2001 {{(pid=61852) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1232.413431] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d261555-5e8d-48f7-a8a2-6793c2d7934d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.430745] env[61852]: DEBUG oslo_vmware.api [None req-bd1ae377-ab01-4c72-af11-1d49c5332f01 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1232.430745] env[61852]: value = "task-1293471" [ 1232.430745] env[61852]: _type = "Task" [ 1232.430745] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.439629] env[61852]: DEBUG oslo_vmware.api [None req-bd1ae377-ab01-4c72-af11-1d49c5332f01 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293471, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.940600] env[61852]: DEBUG oslo_vmware.api [None req-bd1ae377-ab01-4c72-af11-1d49c5332f01 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293471, 'name': ReconfigVM_Task, 'duration_secs': 0.230093} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.940897] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd1ae377-ab01-4c72-af11-1d49c5332f01 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Reconfigured VM instance instance-0000006a to detach disk 2001 {{(pid=61852) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1232.945384] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1fabdacf-c3bb-4ff4-ac23-d97298b16409 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.959356] env[61852]: DEBUG oslo_vmware.api [None req-bd1ae377-ab01-4c72-af11-1d49c5332f01 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1232.959356] env[61852]: value = "task-1293472" [ 1232.959356] env[61852]: _type = "Task" [ 1232.959356] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.968198] env[61852]: DEBUG oslo_vmware.api [None req-bd1ae377-ab01-4c72-af11-1d49c5332f01 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293472, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.469269] env[61852]: DEBUG oslo_vmware.api [None req-bd1ae377-ab01-4c72-af11-1d49c5332f01 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293472, 'name': ReconfigVM_Task, 'duration_secs': 0.129753} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.469606] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd1ae377-ab01-4c72-af11-1d49c5332f01 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-277442', 'volume_id': '1e36cecf-c26f-4406-ab3a-cf66d3ee1958', 'name': 'volume-1e36cecf-c26f-4406-ab3a-cf66d3ee1958', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9f86b477-728e-4187-801b-780781fefb15', 'attached_at': '', 'detached_at': '', 'volume_id': '1e36cecf-c26f-4406-ab3a-cf66d3ee1958', 'serial': '1e36cecf-c26f-4406-ab3a-cf66d3ee1958'} {{(pid=61852) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1234.019011] env[61852]: DEBUG nova.objects.instance [None req-bd1ae377-ab01-4c72-af11-1d49c5332f01 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lazy-loading 'flavor' on Instance uuid 9f86b477-728e-4187-801b-780781fefb15 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1235.026235] env[61852]: DEBUG oslo_concurrency.lockutils [None req-bd1ae377-ab01-4c72-af11-1d49c5332f01 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "9f86b477-728e-4187-801b-780781fefb15" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.214s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1236.034376] env[61852]: DEBUG oslo_concurrency.lockutils [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "9f86b477-728e-4187-801b-780781fefb15" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1236.034666] env[61852]: DEBUG oslo_concurrency.lockutils [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "9f86b477-728e-4187-801b-780781fefb15" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1236.034859] env[61852]: DEBUG oslo_concurrency.lockutils [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "9f86b477-728e-4187-801b-780781fefb15-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1236.035050] env[61852]: DEBUG oslo_concurrency.lockutils [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "9f86b477-728e-4187-801b-780781fefb15-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1236.035230] env[61852]: DEBUG oslo_concurrency.lockutils [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "9f86b477-728e-4187-801b-780781fefb15-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1236.037214] env[61852]: INFO nova.compute.manager [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Terminating instance [ 1236.038895] env[61852]: DEBUG nova.compute.manager [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1236.039105] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1236.039927] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c6a2d96-4a55-4ca5-94ca-7b6438f2d912 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.048031] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1236.048280] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e89b1995-e0f5-4928-b973-ee45b8c1cd12 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.054159] env[61852]: DEBUG oslo_vmware.api [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1236.054159] env[61852]: value = "task-1293473" [ 1236.054159] env[61852]: _type = "Task" [ 1236.054159] env[61852]: } to complete. 
{{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.061801] env[61852]: DEBUG oslo_vmware.api [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293473, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.563859] env[61852]: DEBUG oslo_vmware.api [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293473, 'name': PowerOffVM_Task, 'duration_secs': 0.240534} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.564142] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1236.564418] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1236.564751] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c0e55a8c-0c24-46f5-a65c-ec92cf423de9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.629467] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1236.629674] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1236.629854] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Deleting the datastore file [datastore1] 9f86b477-728e-4187-801b-780781fefb15 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1236.630152] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0f094124-994e-4291-8c17-f2056b3c4937 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.635848] env[61852]: DEBUG oslo_vmware.api [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1236.635848] env[61852]: value = "task-1293475" [ 
1236.635848] env[61852]: _type = "Task" [ 1236.635848] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.642937] env[61852]: DEBUG oslo_vmware.api [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293475, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.147110] env[61852]: DEBUG oslo_vmware.api [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293475, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141702} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.147381] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1237.147567] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1237.147742] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1237.147916] env[61852]: INFO nova.compute.manager [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 9f86b477-728e-4187-801b-780781fefb15] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1237.148209] env[61852]: DEBUG oslo.service.loopingcall [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1237.148423] env[61852]: DEBUG nova.compute.manager [-] [instance: 9f86b477-728e-4187-801b-780781fefb15] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1237.148517] env[61852]: DEBUG nova.network.neutron [-] [instance: 9f86b477-728e-4187-801b-780781fefb15] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1237.196374] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e5984687-d19b-42e1-87fc-2c3629fee41c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquiring lock "d0d109ac-f203-4b68-b973-32d868d8270f" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1237.196615] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e5984687-d19b-42e1-87fc-2c3629fee41c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d0d109ac-f203-4b68-b973-32d868d8270f" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1237.625155] env[61852]: DEBUG nova.compute.manager [req-f4cd9b36-667e-44f6-88e4-a0c57df252d5 req-b0c1d4d4-8ae2-4ee1-8430-5e920abd2327 service nova] [instance: 9f86b477-728e-4187-801b-780781fefb15] Received event network-vif-deleted-b7508435-bcf5-4b82-b506-dd5017524b27 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1237.625462] env[61852]: INFO nova.compute.manager [req-f4cd9b36-667e-44f6-88e4-a0c57df252d5 req-b0c1d4d4-8ae2-4ee1-8430-5e920abd2327 service nova] [instance: 9f86b477-728e-4187-801b-780781fefb15] Neutron deleted interface b7508435-bcf5-4b82-b506-dd5017524b27; detaching it from the instance and deleting it from the info cache [ 1237.625727] env[61852]: DEBUG nova.network.neutron [req-f4cd9b36-667e-44f6-88e4-a0c57df252d5 req-b0c1d4d4-8ae2-4ee1-8430-5e920abd2327 service nova] [instance: 9f86b477-728e-4187-801b-780781fefb15] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1237.699857] env[61852]: DEBUG nova.compute.utils [None req-e5984687-d19b-42e1-87fc-2c3629fee41c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1238.100686] env[61852]: DEBUG nova.network.neutron [-] [instance: 9f86b477-728e-4187-801b-780781fefb15] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1238.129682] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-661acfa5-aeaa-4b90-8f61-187713987a7b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.140046] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f043ac18-3fda-47da-a133-d97608f50c48 {{(pid=61852) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.163324] env[61852]: DEBUG nova.compute.manager [req-f4cd9b36-667e-44f6-88e4-a0c57df252d5 req-b0c1d4d4-8ae2-4ee1-8430-5e920abd2327 service nova] [instance: 9f86b477-728e-4187-801b-780781fefb15] Detach interface failed, port_id=b7508435-bcf5-4b82-b506-dd5017524b27, reason: Instance 9f86b477-728e-4187-801b-780781fefb15 could not be found. {{(pid=61852) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1238.202786] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e5984687-d19b-42e1-87fc-2c3629fee41c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d0d109ac-f203-4b68-b973-32d868d8270f" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1238.603241] env[61852]: INFO nova.compute.manager [-] [instance: 9f86b477-728e-4187-801b-780781fefb15] Took 1.45 seconds to deallocate network for instance. [ 1239.109542] env[61852]: DEBUG oslo_concurrency.lockutils [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1239.109804] env[61852]: DEBUG oslo_concurrency.lockutils [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1239.110046] env[61852]: DEBUG nova.objects.instance [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lazy-loading 'resources' on Instance uuid 9f86b477-728e-4187-801b-780781fefb15 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1239.266568] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e5984687-d19b-42e1-87fc-2c3629fee41c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquiring lock "d0d109ac-f203-4b68-b973-32d868d8270f" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1239.266942] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e5984687-d19b-42e1-87fc-2c3629fee41c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d0d109ac-f203-4b68-b973-32d868d8270f" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1239.267083] env[61852]: INFO nova.compute.manager [None req-e5984687-d19b-42e1-87fc-2c3629fee41c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Attaching volume c604b3e9-d6d7-4c15-8d2e-d8691f5800c2 to /dev/sdb [ 1239.297544] 
env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2a7e6ef-a241-4b1a-bdd7-90a84638f034 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.304201] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68c1e419-b334-4a4b-834f-784cbd5653f5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.317143] env[61852]: DEBUG nova.virt.block_device [None req-e5984687-d19b-42e1-87fc-2c3629fee41c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Updating existing volume attachment record: b400b597-6b78-4a84-843f-fef7a47ecd47 {{(pid=61852) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1239.654307] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2810ffee-9df2-4d94-84f4-e61cc199790f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.661972] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c538413-82e7-4114-9391-2ae82cfe070b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.691527] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e633274a-4206-4f21-96a4-7bd7b42f183d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.698529] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe85e2bf-fe7b-421d-9a8d-02354ec253fd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.711345] env[61852]: DEBUG nova.compute.provider_tree [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1240.213995] env[61852]: DEBUG nova.scheduler.client.report [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1240.719568] env[61852]: DEBUG oslo_concurrency.lockutils [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.610s {{(pid=61852) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1240.737545] env[61852]: INFO nova.scheduler.client.report [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Deleted allocations for instance 9f86b477-728e-4187-801b-780781fefb15 [ 1241.245511] env[61852]: DEBUG oslo_concurrency.lockutils [None req-11480c8b-9d43-475b-b11d-50027b22842e tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "9f86b477-728e-4187-801b-780781fefb15" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.211s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1242.785832] env[61852]: DEBUG oslo_concurrency.lockutils [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "3493166e-5559-4eb6-a53c-4348d2b46aa0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1242.786113] env[61852]: DEBUG oslo_concurrency.lockutils [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "3493166e-5559-4eb6-a53c-4348d2b46aa0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1243.288784] env[61852]: DEBUG nova.compute.manager [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Starting instance... 
{{(pid=61852) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1243.807183] env[61852]: DEBUG oslo_concurrency.lockutils [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1243.807499] env[61852]: DEBUG oslo_concurrency.lockutils [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1243.809042] env[61852]: INFO nova.compute.claims [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1243.857641] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5984687-d19b-42e1-87fc-2c3629fee41c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Volume attach. Driver type: vmdk {{(pid=61852) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1243.857883] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5984687-d19b-42e1-87fc-2c3629fee41c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-277443', 'volume_id': 'c604b3e9-d6d7-4c15-8d2e-d8691f5800c2', 'name': 'volume-c604b3e9-d6d7-4c15-8d2e-d8691f5800c2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd0d109ac-f203-4b68-b973-32d868d8270f', 'attached_at': '', 'detached_at': '', 'volume_id': 'c604b3e9-d6d7-4c15-8d2e-d8691f5800c2', 'serial': 'c604b3e9-d6d7-4c15-8d2e-d8691f5800c2'} {{(pid=61852) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1243.859011] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a6169bd-9207-469c-bac5-1f4e1a88dcaa {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.874620] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d53928b9-739b-4641-af21-14d0a292f566 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.898086] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5984687-d19b-42e1-87fc-2c3629fee41c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] volume-c604b3e9-d6d7-4c15-8d2e-d8691f5800c2/volume-c604b3e9-d6d7-4c15-8d2e-d8691f5800c2.vmdk or device None with type thin {{(pid=61852) attach_disk_to_vm 
/opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1243.898342] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3287fff-b1e4-4290-a6f5-f89a99e5c4c1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.915519] env[61852]: DEBUG oslo_vmware.api [None req-e5984687-d19b-42e1-87fc-2c3629fee41c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1243.915519] env[61852]: value = "task-1293478" [ 1243.915519] env[61852]: _type = "Task" [ 1243.915519] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.922753] env[61852]: DEBUG oslo_vmware.api [None req-e5984687-d19b-42e1-87fc-2c3629fee41c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293478, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.425103] env[61852]: DEBUG oslo_vmware.api [None req-e5984687-d19b-42e1-87fc-2c3629fee41c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293478, 'name': ReconfigVM_Task, 'duration_secs': 0.303357} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.425397] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5984687-d19b-42e1-87fc-2c3629fee41c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Reconfigured VM instance instance-0000006b to attach disk [datastore1] volume-c604b3e9-d6d7-4c15-8d2e-d8691f5800c2/volume-c604b3e9-d6d7-4c15-8d2e-d8691f5800c2.vmdk or device None with type thin {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1244.429984] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17ddea91-3e0c-4a0b-b0fc-cf8b6575aa58 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.443985] env[61852]: DEBUG oslo_vmware.api [None req-e5984687-d19b-42e1-87fc-2c3629fee41c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1244.443985] env[61852]: value = "task-1293479" [ 1244.443985] env[61852]: _type = "Task" [ 1244.443985] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.452711] env[61852]: DEBUG oslo_vmware.api [None req-e5984687-d19b-42e1-87fc-2c3629fee41c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293479, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.860033] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8190720f-b448-45ef-bd53-10fc994b85d4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.867605] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-441a2295-11df-4712-8d2e-0423693a2f94 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.898339] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d5c234a-e42d-469b-bdbb-7cc51ac8670e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.905330] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4aa15e1-2b8c-45a4-abf1-b1ab83dc189c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.924314] env[61852]: DEBUG nova.compute.provider_tree [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1244.953179] env[61852]: DEBUG oslo_vmware.api [None req-e5984687-d19b-42e1-87fc-2c3629fee41c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293479, 'name': ReconfigVM_Task, 'duration_secs': 0.129091} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.953473] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5984687-d19b-42e1-87fc-2c3629fee41c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-277443', 'volume_id': 'c604b3e9-d6d7-4c15-8d2e-d8691f5800c2', 'name': 'volume-c604b3e9-d6d7-4c15-8d2e-d8691f5800c2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd0d109ac-f203-4b68-b973-32d868d8270f', 'attached_at': '', 'detached_at': '', 'volume_id': 'c604b3e9-d6d7-4c15-8d2e-d8691f5800c2', 'serial': 'c604b3e9-d6d7-4c15-8d2e-d8691f5800c2'} {{(pid=61852) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1245.429182] env[61852]: DEBUG nova.scheduler.client.report [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1245.934090] env[61852]: DEBUG oslo_concurrency.lockutils [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.126s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1245.934705] env[61852]: DEBUG nova.compute.manager [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Start building networks asynchronously for instance. {{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1245.987724] env[61852]: DEBUG nova.objects.instance [None req-e5984687-d19b-42e1-87fc-2c3629fee41c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lazy-loading 'flavor' on Instance uuid d0d109ac-f203-4b68-b973-32d868d8270f {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1246.440395] env[61852]: DEBUG nova.compute.utils [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1246.442113] env[61852]: DEBUG nova.compute.manager [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Allocating IP information in the background. 
{{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1246.442290] env[61852]: DEBUG nova.network.neutron [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] allocate_for_instance() {{(pid=61852) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1246.488409] env[61852]: DEBUG nova.policy [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '81c41a76b275406c83c80068659e2b04', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3b019fd876c14428bd8f2de5fa66da4d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61852) authorize /opt/stack/nova/nova/policy.py:201}} [ 1246.493734] env[61852]: DEBUG oslo_concurrency.lockutils [None req-e5984687-d19b-42e1-87fc-2c3629fee41c tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d0d109ac-f203-4b68-b973-32d868d8270f" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.227s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1246.745734] env[61852]: DEBUG nova.network.neutron [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Successfully created port: 3f9ce73a-5444-428a-a963-f389921fbf99 {{(pid=61852) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1246.945749] env[61852]: DEBUG nova.compute.manager [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Start building block device mappings for instance. 
{{(pid=61852) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1247.321568] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1c266bab-4802-4825-b090-89915e0f5ab2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquiring lock "d0d109ac-f203-4b68-b973-32d868d8270f" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1247.321825] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1c266bab-4802-4825-b090-89915e0f5ab2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d0d109ac-f203-4b68-b973-32d868d8270f" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1247.825971] env[61852]: DEBUG nova.compute.utils [None req-1c266bab-4802-4825-b090-89915e0f5ab2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1247.955696] env[61852]: DEBUG nova.compute.manager [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Start spawning the instance on the hypervisor. {{(pid=61852) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1247.979858] env[61852]: DEBUG nova.virt.hardware [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T17:18:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T17:18:23Z,direct_url=,disk_format='vmdk',id=90fd8f39-16b3-43e0-a682-0ec131005e31,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f81b609db0954f0a9e9474a2fd875f0c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T17:18:24Z,virtual_size=,visibility=), allow threads: False {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1247.980138] env[61852]: DEBUG nova.virt.hardware [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Flavor limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1247.980305] env[61852]: DEBUG nova.virt.hardware [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Image limits 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1247.980494] env[61852]: DEBUG nova.virt.hardware [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 
tempest-AttachVolumeNegativeTest-1946418435-project-member] Flavor pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1247.980648] env[61852]: DEBUG nova.virt.hardware [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Image pref 0:0:0 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1247.980882] env[61852]: DEBUG nova.virt.hardware [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61852) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1247.981119] env[61852]: DEBUG nova.virt.hardware [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1247.981291] env[61852]: DEBUG nova.virt.hardware [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1247.981466] env[61852]: DEBUG nova.virt.hardware [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Got 1 possible topologies {{(pid=61852) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1247.981635] env[61852]: DEBUG nova.virt.hardware [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1247.981812] env[61852]: DEBUG nova.virt.hardware [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61852) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1247.982757] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39d6434e-2b8c-4fc3-9de7-4170115cff66 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.990856] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e6be32d-88df-4370-9a28-24ae94a3d31d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.114892] env[61852]: DEBUG nova.compute.manager [req-3d0361b9-acfc-47ca-a551-cb39eecb88ac req-dfed05d7-c050-46d0-a93c-0529512f3ce6 service nova] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Received event network-vif-plugged-3f9ce73a-5444-428a-a963-f389921fbf99 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 
1248.115130] env[61852]: DEBUG oslo_concurrency.lockutils [req-3d0361b9-acfc-47ca-a551-cb39eecb88ac req-dfed05d7-c050-46d0-a93c-0529512f3ce6 service nova] Acquiring lock "3493166e-5559-4eb6-a53c-4348d2b46aa0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1248.115354] env[61852]: DEBUG oslo_concurrency.lockutils [req-3d0361b9-acfc-47ca-a551-cb39eecb88ac req-dfed05d7-c050-46d0-a93c-0529512f3ce6 service nova] Lock "3493166e-5559-4eb6-a53c-4348d2b46aa0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1248.115527] env[61852]: DEBUG oslo_concurrency.lockutils [req-3d0361b9-acfc-47ca-a551-cb39eecb88ac req-dfed05d7-c050-46d0-a93c-0529512f3ce6 service nova] Lock "3493166e-5559-4eb6-a53c-4348d2b46aa0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1248.115695] env[61852]: DEBUG nova.compute.manager [req-3d0361b9-acfc-47ca-a551-cb39eecb88ac req-dfed05d7-c050-46d0-a93c-0529512f3ce6 service nova] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] No waiting events found dispatching network-vif-plugged-3f9ce73a-5444-428a-a963-f389921fbf99 {{(pid=61852) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1248.115862] env[61852]: WARNING nova.compute.manager [req-3d0361b9-acfc-47ca-a551-cb39eecb88ac req-dfed05d7-c050-46d0-a93c-0529512f3ce6 service nova] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Received unexpected event network-vif-plugged-3f9ce73a-5444-428a-a963-f389921fbf99 for instance with vm_state building and task_state spawning. 
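Editor's note: the ReconfigVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task and CreateVM_Task sequences above all follow the same oslo.vmware pattern: invoke a vSphere method that immediately returns a task moref, then block on it while _poll_task logs the "progress is N%" lines. A minimal sketch of that pattern, assuming a reachable vCenter; the endpoint, credentials, and helper name below are illustrative placeholders, not this deployment's values or Nova's exact driver code:

    from oslo_vmware import api

    # Session setup mirrors the VMwareAPISession._create_session activity
    # logged earlier; host and credentials here are hypothetical.
    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    def power_off(vm_ref):
        # The vSphere call returns a task moref right away ...
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # ... and wait_for_task() polls it (producing progress log lines)
        # until it succeeds, returning the task info, or raises on error.
        return session.wait_for_task(task)

The same invoke/wait pair underlies the volume attach and detach reconfigurations seen in this section; only the method name and arguments change.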
[ 1248.198149] env[61852]: DEBUG nova.network.neutron [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Successfully updated port: 3f9ce73a-5444-428a-a963-f389921fbf99 {{(pid=61852) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1248.328995] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1c266bab-4802-4825-b090-89915e0f5ab2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d0d109ac-f203-4b68-b973-32d868d8270f" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1248.701541] env[61852]: DEBUG oslo_concurrency.lockutils [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "refresh_cache-3493166e-5559-4eb6-a53c-4348d2b46aa0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1248.701713] env[61852]: DEBUG oslo_concurrency.lockutils [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquired lock "refresh_cache-3493166e-5559-4eb6-a53c-4348d2b46aa0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1248.701943] env[61852]: DEBUG nova.network.neutron [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Building network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1249.247636] env[61852]: DEBUG nova.network.neutron [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Instance cache missing network info. 
{{(pid=61852) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1249.390078] env[61852]: DEBUG nova.network.neutron [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Updating instance_info_cache with network_info: [{"id": "3f9ce73a-5444-428a-a963-f389921fbf99", "address": "fa:16:3e:d3:01:5b", "network": {"id": "84f117d3-1eaf-4f99-9240-7342ce499c83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1473985775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b019fd876c14428bd8f2de5fa66da4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f9ce73a-54", "ovs_interfaceid": "3f9ce73a-5444-428a-a963-f389921fbf99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1249.392024] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1c266bab-4802-4825-b090-89915e0f5ab2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquiring lock "d0d109ac-f203-4b68-b973-32d868d8270f" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1249.392024] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1c266bab-4802-4825-b090-89915e0f5ab2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d0d109ac-f203-4b68-b973-32d868d8270f" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1249.392209] env[61852]: INFO nova.compute.manager [None req-1c266bab-4802-4825-b090-89915e0f5ab2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Attaching volume 2b1a71df-a087-41b9-8749-50de3628dac0 to /dev/sdc [ 1249.424233] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55e085da-072a-4fd2-8d5e-9a2d65c97f9d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.431414] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74031791-5ec5-4fa5-8882-ac6dcacf0c9f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.444833] env[61852]: DEBUG nova.virt.block_device [None req-1c266bab-4802-4825-b090-89915e0f5ab2 tempest-AttachVolumeTestJSON-391606047 
tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Updating existing volume attachment record: 882c27b4-2b17-4fe9-a215-11f8df362e14 {{(pid=61852) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1249.894895] env[61852]: DEBUG oslo_concurrency.lockutils [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Releasing lock "refresh_cache-3493166e-5559-4eb6-a53c-4348d2b46aa0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1249.895256] env[61852]: DEBUG nova.compute.manager [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Instance network_info: |[{"id": "3f9ce73a-5444-428a-a963-f389921fbf99", "address": "fa:16:3e:d3:01:5b", "network": {"id": "84f117d3-1eaf-4f99-9240-7342ce499c83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1473985775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b019fd876c14428bd8f2de5fa66da4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f9ce73a-54", "ovs_interfaceid": "3f9ce73a-5444-428a-a963-f389921fbf99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61852) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1249.895761] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:01:5b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e23c1d18-c841-49ea-95f3-df5ceac28afd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3f9ce73a-5444-428a-a963-f389921fbf99', 'vif_model': 'vmxnet3'}] {{(pid=61852) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1249.904578] env[61852]: DEBUG oslo.service.loopingcall [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1249.904950] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Creating VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1249.905330] env[61852]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-89d247a2-cdcf-48ea-94b8-6bcc934376de {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.928695] env[61852]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1249.928695] env[61852]: value = "task-1293481" [ 1249.928695] env[61852]: _type = "Task" [ 1249.928695] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.937087] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293481, 'name': CreateVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.142942] env[61852]: DEBUG nova.compute.manager [req-e05ca51c-a4da-4a0d-8bae-258a33630568 req-0d75e9a1-6798-4fe0-917b-c742c181dbb2 service nova] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Received event network-changed-3f9ce73a-5444-428a-a963-f389921fbf99 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1250.143159] env[61852]: DEBUG nova.compute.manager [req-e05ca51c-a4da-4a0d-8bae-258a33630568 req-0d75e9a1-6798-4fe0-917b-c742c181dbb2 service nova] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Refreshing instance network info cache due to event network-changed-3f9ce73a-5444-428a-a963-f389921fbf99. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1250.143380] env[61852]: DEBUG oslo_concurrency.lockutils [req-e05ca51c-a4da-4a0d-8bae-258a33630568 req-0d75e9a1-6798-4fe0-917b-c742c181dbb2 service nova] Acquiring lock "refresh_cache-3493166e-5559-4eb6-a53c-4348d2b46aa0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1250.143527] env[61852]: DEBUG oslo_concurrency.lockutils [req-e05ca51c-a4da-4a0d-8bae-258a33630568 req-0d75e9a1-6798-4fe0-917b-c742c181dbb2 service nova] Acquired lock "refresh_cache-3493166e-5559-4eb6-a53c-4348d2b46aa0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1250.143685] env[61852]: DEBUG nova.network.neutron [req-e05ca51c-a4da-4a0d-8bae-258a33630568 req-0d75e9a1-6798-4fe0-917b-c742c181dbb2 service nova] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Refreshing network info cache for port 3f9ce73a-5444-428a-a963-f389921fbf99 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1250.438879] env[61852]: DEBUG oslo_vmware.api [-] Task: {'id': task-1293481, 'name': CreateVM_Task, 'duration_secs': 0.318268} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.439235] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Created VM on the ESX host {{(pid=61852) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1250.439729] env[61852]: DEBUG oslo_concurrency.lockutils [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1250.439899] env[61852]: DEBUG oslo_concurrency.lockutils [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1250.440267] env[61852]: DEBUG oslo_concurrency.lockutils [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1250.440516] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0645e16e-5fef-4fb7-ac4d-9ad458664dd3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.444670] env[61852]: DEBUG oslo_vmware.api [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1250.444670] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52171e45-bf38-5a7d-893f-3b3327ba65d8" [ 1250.444670] env[61852]: _type = "Task" [ 1250.444670] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.451766] env[61852]: DEBUG oslo_vmware.api [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52171e45-bf38-5a7d-893f-3b3327ba65d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.821735] env[61852]: DEBUG nova.network.neutron [req-e05ca51c-a4da-4a0d-8bae-258a33630568 req-0d75e9a1-6798-4fe0-917b-c742c181dbb2 service nova] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Updated VIF entry in instance network info cache for port 3f9ce73a-5444-428a-a963-f389921fbf99. 
{{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1250.822780] env[61852]: DEBUG nova.network.neutron [req-e05ca51c-a4da-4a0d-8bae-258a33630568 req-0d75e9a1-6798-4fe0-917b-c742c181dbb2 service nova] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Updating instance_info_cache with network_info: [{"id": "3f9ce73a-5444-428a-a963-f389921fbf99", "address": "fa:16:3e:d3:01:5b", "network": {"id": "84f117d3-1eaf-4f99-9240-7342ce499c83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1473985775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b019fd876c14428bd8f2de5fa66da4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f9ce73a-54", "ovs_interfaceid": "3f9ce73a-5444-428a-a963-f389921fbf99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1250.955365] env[61852]: DEBUG oslo_vmware.api [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52171e45-bf38-5a7d-893f-3b3327ba65d8, 'name': SearchDatastore_Task, 'duration_secs': 0.009083} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.955622] env[61852]: DEBUG oslo_concurrency.lockutils [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1250.955867] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Processing image 90fd8f39-16b3-43e0-a682-0ec131005e31 {{(pid=61852) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1250.956116] env[61852]: DEBUG oslo_concurrency.lockutils [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1250.956269] env[61852]: DEBUG oslo_concurrency.lockutils [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquired lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1250.956455] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1250.956754] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-29473d12-7d8f-4613-86bb-492ca5544d24 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.964366] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61852) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1250.964548] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61852) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1250.965232] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f45bfa7d-fe36-4b5e-affc-4f6cbf108e93 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.969921] env[61852]: DEBUG oslo_vmware.api [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1250.969921] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52aed38c-676e-da1e-d81e-63a7e760e397" [ 1250.969921] env[61852]: _type = "Task" [ 1250.969921] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.978216] env[61852]: DEBUG oslo_vmware.api [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52aed38c-676e-da1e-d81e-63a7e760e397, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.324581] env[61852]: DEBUG oslo_concurrency.lockutils [req-e05ca51c-a4da-4a0d-8bae-258a33630568 req-0d75e9a1-6798-4fe0-917b-c742c181dbb2 service nova] Releasing lock "refresh_cache-3493166e-5559-4eb6-a53c-4348d2b46aa0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1251.479649] env[61852]: DEBUG oslo_vmware.api [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52aed38c-676e-da1e-d81e-63a7e760e397, 'name': SearchDatastore_Task, 'duration_secs': 0.008024} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.480440] env[61852]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64b15c72-2986-4870-812f-313afd764233 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.485077] env[61852]: DEBUG oslo_vmware.api [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1251.485077] env[61852]: value = "session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52e3149e-d7f8-c471-6313-78dfd9734190" [ 1251.485077] env[61852]: _type = "Task" [ 1251.485077] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.491962] env[61852]: DEBUG oslo_vmware.api [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52e3149e-d7f8-c471-6313-78dfd9734190, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.998364] env[61852]: DEBUG oslo_vmware.api [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': session[52f211ed-fcd4-3895-6de3-e9e0cc7f027d]52e3149e-d7f8-c471-6313-78dfd9734190, 'name': SearchDatastore_Task, 'duration_secs': 0.009088} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.998788] env[61852]: DEBUG oslo_concurrency.lockutils [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Releasing lock "[datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1251.999200] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 3493166e-5559-4eb6-a53c-4348d2b46aa0/3493166e-5559-4eb6-a53c-4348d2b46aa0.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1251.999576] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0057a5fd-5807-49ef-b255-b7545681d05a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.006996] env[61852]: DEBUG oslo_vmware.api [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1252.006996] env[61852]: value = "task-1293483" [ 1252.006996] env[61852]: _type = "Task" [ 1252.006996] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.016991] env[61852]: DEBUG oslo_vmware.api [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293483, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.516019] env[61852]: DEBUG oslo_vmware.api [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293483, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.419564} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.516386] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/90fd8f39-16b3-43e0-a682-0ec131005e31/90fd8f39-16b3-43e0-a682-0ec131005e31.vmdk to [datastore1] 3493166e-5559-4eb6-a53c-4348d2b46aa0/3493166e-5559-4eb6-a53c-4348d2b46aa0.vmdk {{(pid=61852) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1252.516477] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Extending root virtual disk to 1048576 {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1252.516719] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1f4a5bb1-3a28-4bbb-859d-a3822249a916 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.522668] env[61852]: DEBUG oslo_vmware.api [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1252.522668] env[61852]: value = "task-1293484" [ 1252.522668] env[61852]: _type = "Task" [ 1252.522668] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.529680] env[61852]: DEBUG oslo_vmware.api [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293484, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.032831] env[61852]: DEBUG oslo_vmware.api [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293484, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068543} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.033121] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Extended root virtual disk {{(pid=61852) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1253.033869] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d8689c0-580e-4d3a-8d0d-ecd230b99333 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.054736] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] 3493166e-5559-4eb6-a53c-4348d2b46aa0/3493166e-5559-4eb6-a53c-4348d2b46aa0.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1253.054999] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-22a25c34-c4cf-44fa-93b0-0fdf4103151b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.074569] env[61852]: DEBUG oslo_vmware.api [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1253.074569] env[61852]: value = "task-1293485" [ 1253.074569] env[61852]: _type = "Task" [ 1253.074569] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.084222] env[61852]: DEBUG oslo_vmware.api [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293485, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.585022] env[61852]: DEBUG oslo_vmware.api [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293485, 'name': ReconfigVM_Task, 'duration_secs': 0.262377} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.585392] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Reconfigured VM instance instance-0000006c to attach disk [datastore1] 3493166e-5559-4eb6-a53c-4348d2b46aa0/3493166e-5559-4eb6-a53c-4348d2b46aa0.vmdk or device None with type sparse {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1253.585961] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bbab4ec5-17d6-476b-81ac-aa9d1b3dba57 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.591465] env[61852]: DEBUG oslo_vmware.api [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1253.591465] env[61852]: value = "task-1293486" [ 1253.591465] env[61852]: _type = "Task" [ 1253.591465] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.598643] env[61852]: DEBUG oslo_vmware.api [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293486, 'name': Rename_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.989714] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c266bab-4802-4825-b090-89915e0f5ab2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Volume attach. 
Driver type: vmdk {{(pid=61852) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1253.989972] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c266bab-4802-4825-b090-89915e0f5ab2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-277444', 'volume_id': '2b1a71df-a087-41b9-8749-50de3628dac0', 'name': 'volume-2b1a71df-a087-41b9-8749-50de3628dac0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd0d109ac-f203-4b68-b973-32d868d8270f', 'attached_at': '', 'detached_at': '', 'volume_id': '2b1a71df-a087-41b9-8749-50de3628dac0', 'serial': '2b1a71df-a087-41b9-8749-50de3628dac0'} {{(pid=61852) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1253.990860] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efdb181e-bbff-4de4-bffc-e2a9941238eb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.007677] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d34e70c4-a629-475c-b40c-8d8d82e0453e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.034808] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c266bab-4802-4825-b090-89915e0f5ab2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] volume-2b1a71df-a087-41b9-8749-50de3628dac0/volume-2b1a71df-a087-41b9-8749-50de3628dac0.vmdk or device None with type thin {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1254.035076] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2404938-c644-41e6-a2c6-dce9954e1843 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.051504] env[61852]: DEBUG oslo_vmware.api [None req-1c266bab-4802-4825-b090-89915e0f5ab2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1254.051504] env[61852]: value = "task-1293487" [ 1254.051504] env[61852]: _type = "Task" [ 1254.051504] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.058734] env[61852]: DEBUG oslo_vmware.api [None req-1c266bab-4802-4825-b090-89915e0f5ab2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293487, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.100331] env[61852]: DEBUG oslo_vmware.api [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293486, 'name': Rename_Task, 'duration_secs': 0.125958} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.100650] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Powering on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1254.100910] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6c29f49f-6430-47a1-8f74-3df8a9405d74 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.106527] env[61852]: DEBUG oslo_vmware.api [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1254.106527] env[61852]: value = "task-1293488" [ 1254.106527] env[61852]: _type = "Task" [ 1254.106527] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.113724] env[61852]: DEBUG oslo_vmware.api [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293488, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.561120] env[61852]: DEBUG oslo_vmware.api [None req-1c266bab-4802-4825-b090-89915e0f5ab2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293487, 'name': ReconfigVM_Task, 'duration_secs': 0.351672} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.561387] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c266bab-4802-4825-b090-89915e0f5ab2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Reconfigured VM instance instance-0000006b to attach disk [datastore1] volume-2b1a71df-a087-41b9-8749-50de3628dac0/volume-2b1a71df-a087-41b9-8749-50de3628dac0.vmdk or device None with type thin {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1254.565942] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6e9083bf-3705-4bed-8ed9-917ec611663a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.580445] env[61852]: DEBUG oslo_vmware.api [None req-1c266bab-4802-4825-b090-89915e0f5ab2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1254.580445] env[61852]: value = "task-1293489" [ 1254.580445] env[61852]: _type = "Task" [ 1254.580445] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.587738] env[61852]: DEBUG oslo_vmware.api [None req-1c266bab-4802-4825-b090-89915e0f5ab2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293489, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.614322] env[61852]: DEBUG oslo_vmware.api [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293488, 'name': PowerOnVM_Task, 'duration_secs': 0.418995} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.614572] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Powered on the VM {{(pid=61852) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1254.614776] env[61852]: INFO nova.compute.manager [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Took 6.66 seconds to spawn the instance on the hypervisor. [ 1254.614958] env[61852]: DEBUG nova.compute.manager [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Checking state {{(pid=61852) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1254.615694] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0342b8f-b281-41bc-b926-806f1e1e66ee {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.090867] env[61852]: DEBUG oslo_vmware.api [None req-1c266bab-4802-4825-b090-89915e0f5ab2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293489, 'name': ReconfigVM_Task, 'duration_secs': 0.126341} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.091196] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c266bab-4802-4825-b090-89915e0f5ab2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-277444', 'volume_id': '2b1a71df-a087-41b9-8749-50de3628dac0', 'name': 'volume-2b1a71df-a087-41b9-8749-50de3628dac0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd0d109ac-f203-4b68-b973-32d868d8270f', 'attached_at': '', 'detached_at': '', 'volume_id': '2b1a71df-a087-41b9-8749-50de3628dac0', 'serial': '2b1a71df-a087-41b9-8749-50de3628dac0'} {{(pid=61852) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1255.134981] env[61852]: INFO nova.compute.manager [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Took 11.34 seconds to build instance. 
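Editor's note: the records above exercise two mechanisms over and over: oslo_concurrency.lockutils serializing per-instance work (the "do_attach_volume" and "_locked_do_build_and_run_instance" locks keyed by instance UUID) and oslo.vmware driving asynchronous vCenter tasks to completion (CreateVM_Task, CopyVirtualDisk_Task, PowerOnVM_Task, each polled until "completed successfully"). Below is a minimal illustrative sketch of both patterns, not Nova's actual code: the connection parameters, instance UUID, and the vm_folder/config_spec/res_pool references are placeholders that a real caller would obtain from earlier API calls.

# Sketch only; assumes a reachable vCenter and pre-built managed
# object references. Mirrors the lock + task-polling shape in the log.
from oslo_concurrency import lockutils
from oslo_vmware import api

host, user, password = 'vc.example.test', 'user', 'secret'  # placeholders
session = api.VMwareAPISession(
    host, user, password,
    api_retry_count=10,      # retry transient vCenter faults
    task_poll_interval=0.5)  # produces the "progress is N%" records above

instance_uuid = 'd0d109ac-f203-4b68-b973-32d868d8270f'
vm_folder = config_spec = res_pool = None  # placeholders for real refs

# Serialize operations on one instance, as the
# 'Lock "<uuid>" acquired by ... do_attach_volume' records show.
with lockutils.lock(instance_uuid):
    # Kick off an asynchronous vCenter task, then block while
    # oslo.vmware polls it to completion (wait_for_task).
    task_ref = session.invoke_api(
        session.vim, 'CreateVM_Task',
        vm_folder, config=config_spec, pool=res_pool)
    session.wait_for_task(task_ref)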
[ 1255.637415] env[61852]: DEBUG oslo_concurrency.lockutils [None req-efa94559-523c-41fe-a71c-7f026aa67cc9 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "3493166e-5559-4eb6-a53c-4348d2b46aa0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.851s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1255.915886] env[61852]: DEBUG nova.compute.manager [req-30400e16-1b7e-422b-94aa-c66fb515977b req-ed4abe09-bb33-49c1-8c7b-e66bb7878942 service nova] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Received event network-changed-3f9ce73a-5444-428a-a963-f389921fbf99 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1255.916104] env[61852]: DEBUG nova.compute.manager [req-30400e16-1b7e-422b-94aa-c66fb515977b req-ed4abe09-bb33-49c1-8c7b-e66bb7878942 service nova] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Refreshing instance network info cache due to event network-changed-3f9ce73a-5444-428a-a963-f389921fbf99. {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1255.916340] env[61852]: DEBUG oslo_concurrency.lockutils [req-30400e16-1b7e-422b-94aa-c66fb515977b req-ed4abe09-bb33-49c1-8c7b-e66bb7878942 service nova] Acquiring lock "refresh_cache-3493166e-5559-4eb6-a53c-4348d2b46aa0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1255.916490] env[61852]: DEBUG oslo_concurrency.lockutils [req-30400e16-1b7e-422b-94aa-c66fb515977b req-ed4abe09-bb33-49c1-8c7b-e66bb7878942 service nova] Acquired lock "refresh_cache-3493166e-5559-4eb6-a53c-4348d2b46aa0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1255.916653] env[61852]: DEBUG nova.network.neutron [req-30400e16-1b7e-422b-94aa-c66fb515977b req-ed4abe09-bb33-49c1-8c7b-e66bb7878942 service nova] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Refreshing network info cache for port 3f9ce73a-5444-428a-a963-f389921fbf99 {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1256.128155] env[61852]: DEBUG nova.objects.instance [None req-1c266bab-4802-4825-b090-89915e0f5ab2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lazy-loading 'flavor' on Instance uuid d0d109ac-f203-4b68-b973-32d868d8270f {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1256.156879] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1256.157115] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1256.157264] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61852) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1256.625441] env[61852]: DEBUG nova.network.neutron [req-30400e16-1b7e-422b-94aa-c66fb515977b req-ed4abe09-bb33-49c1-8c7b-e66bb7878942 service nova] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Updated VIF entry in instance network info cache for port 3f9ce73a-5444-428a-a963-f389921fbf99. {{(pid=61852) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1256.625791] env[61852]: DEBUG nova.network.neutron [req-30400e16-1b7e-422b-94aa-c66fb515977b req-ed4abe09-bb33-49c1-8c7b-e66bb7878942 service nova] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Updating instance_info_cache with network_info: [{"id": "3f9ce73a-5444-428a-a963-f389921fbf99", "address": "fa:16:3e:d3:01:5b", "network": {"id": "84f117d3-1eaf-4f99-9240-7342ce499c83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1473985775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b019fd876c14428bd8f2de5fa66da4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f9ce73a-54", "ovs_interfaceid": "3f9ce73a-5444-428a-a963-f389921fbf99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1256.632981] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1c266bab-4802-4825-b090-89915e0f5ab2 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d0d109ac-f203-4b68-b973-32d868d8270f" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.241s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1256.927494] env[61852]: DEBUG oslo_concurrency.lockutils [None req-558f29a6-5224-4ca9-bbfa-58a48fa67d9f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquiring lock "d0d109ac-f203-4b68-b973-32d868d8270f" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1256.927871] env[61852]: DEBUG oslo_concurrency.lockutils [None req-558f29a6-5224-4ca9-bbfa-58a48fa67d9f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d0d109ac-f203-4b68-b973-32d868d8270f" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1257.128832] env[61852]: DEBUG oslo_concurrency.lockutils [req-30400e16-1b7e-422b-94aa-c66fb515977b req-ed4abe09-bb33-49c1-8c7b-e66bb7878942 
service nova] Releasing lock "refresh_cache-3493166e-5559-4eb6-a53c-4348d2b46aa0" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1257.157014] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1257.157295] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1257.431260] env[61852]: INFO nova.compute.manager [None req-558f29a6-5224-4ca9-bbfa-58a48fa67d9f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Detaching volume c604b3e9-d6d7-4c15-8d2e-d8691f5800c2 [ 1257.464019] env[61852]: INFO nova.virt.block_device [None req-558f29a6-5224-4ca9-bbfa-58a48fa67d9f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Attempting to driver detach volume c604b3e9-d6d7-4c15-8d2e-d8691f5800c2 from mountpoint /dev/sdb [ 1257.464019] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-558f29a6-5224-4ca9-bbfa-58a48fa67d9f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Volume detach. Driver type: vmdk {{(pid=61852) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1257.464019] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-558f29a6-5224-4ca9-bbfa-58a48fa67d9f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-277443', 'volume_id': 'c604b3e9-d6d7-4c15-8d2e-d8691f5800c2', 'name': 'volume-c604b3e9-d6d7-4c15-8d2e-d8691f5800c2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd0d109ac-f203-4b68-b973-32d868d8270f', 'attached_at': '', 'detached_at': '', 'volume_id': 'c604b3e9-d6d7-4c15-8d2e-d8691f5800c2', 'serial': 'c604b3e9-d6d7-4c15-8d2e-d8691f5800c2'} {{(pid=61852) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1257.464807] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51492de8-cf7b-4125-a973-e2d5a5fa00e4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.488634] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d8459c-de6b-417f-8156-4b0ff436d0f4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.495504] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67006d05-ffa7-4ef9-9176-4ee17f2eb940 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.518680] env[61852]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abc85927-ff3b-4f7b-ab7e-9d6fd8d88e17 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.532611] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-558f29a6-5224-4ca9-bbfa-58a48fa67d9f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] The volume has not been displaced from its original location: [datastore1] volume-c604b3e9-d6d7-4c15-8d2e-d8691f5800c2/volume-c604b3e9-d6d7-4c15-8d2e-d8691f5800c2.vmdk. No consolidation needed. {{(pid=61852) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1257.537741] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-558f29a6-5224-4ca9-bbfa-58a48fa67d9f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Reconfiguring VM instance instance-0000006b to detach disk 2001 {{(pid=61852) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1257.538013] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-802f594b-7cbb-4dca-8d11-89352cdf468a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.554900] env[61852]: DEBUG oslo_vmware.api [None req-558f29a6-5224-4ca9-bbfa-58a48fa67d9f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1257.554900] env[61852]: value = "task-1293490" [ 1257.554900] env[61852]: _type = "Task" [ 1257.554900] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.562126] env[61852]: DEBUG oslo_vmware.api [None req-558f29a6-5224-4ca9-bbfa-58a48fa67d9f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293490, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.660517] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1257.660747] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1257.660918] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1257.661090] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61852) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1257.662022] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6998b0a0-1127-46bf-96d5-7b692338bce1 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.669739] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45996d1d-15dc-477b-9029-39a83cfb99cd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.683836] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab9b3d27-1837-48fb-9314-05a96479ef1c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.690328] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e00ca98-ea92-42d5-a708-186800f8631b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.718266] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181105MB free_disk=139GB free_vcpus=48 pci_devices=None {{(pid=61852) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1257.718437] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1257.718628] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61852) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1258.067386] env[61852]: DEBUG oslo_vmware.api [None req-558f29a6-5224-4ca9-bbfa-58a48fa67d9f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293490, 'name': ReconfigVM_Task, 'duration_secs': 0.219933} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.067807] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-558f29a6-5224-4ca9-bbfa-58a48fa67d9f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Reconfigured VM instance instance-0000006b to detach disk 2001 {{(pid=61852) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1258.075353] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-491f4ceb-6b16-47e2-afeb-44b3dc278ee6 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.094654] env[61852]: DEBUG oslo_vmware.api [None req-558f29a6-5224-4ca9-bbfa-58a48fa67d9f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1258.094654] env[61852]: value = "task-1293491" [ 1258.094654] env[61852]: _type = "Task" [ 1258.094654] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.103398] env[61852]: DEBUG oslo_vmware.api [None req-558f29a6-5224-4ca9-bbfa-58a48fa67d9f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293491, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.604695] env[61852]: DEBUG oslo_vmware.api [None req-558f29a6-5224-4ca9-bbfa-58a48fa67d9f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293491, 'name': ReconfigVM_Task, 'duration_secs': 0.164818} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.605008] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-558f29a6-5224-4ca9-bbfa-58a48fa67d9f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-277443', 'volume_id': 'c604b3e9-d6d7-4c15-8d2e-d8691f5800c2', 'name': 'volume-c604b3e9-d6d7-4c15-8d2e-d8691f5800c2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd0d109ac-f203-4b68-b973-32d868d8270f', 'attached_at': '', 'detached_at': '', 'volume_id': 'c604b3e9-d6d7-4c15-8d2e-d8691f5800c2', 'serial': 'c604b3e9-d6d7-4c15-8d2e-d8691f5800c2'} {{(pid=61852) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1258.741289] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance d0d109ac-f203-4b68-b973-32d868d8270f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1258.741435] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Instance 3493166e-5559-4eb6-a53c-4348d2b46aa0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61852) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1258.741602] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=61852) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1258.741742] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=61852) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1258.774683] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93d7b972-2561-4ef4-9166-bc72c46315f3 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.781674] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3ceb05d-b719-4b11-bc4a-1c369b523fb7 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.810214] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36bdad85-f8b9-4188-8f3d-6f64542529fc {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.816995] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a84eb324-53bc-40ee-bfc0-6660950ac48d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.829600] env[61852]: DEBUG nova.compute.provider_tree [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1259.147685] env[61852]: DEBUG nova.objects.instance [None req-558f29a6-5224-4ca9-bbfa-58a48fa67d9f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lazy-loading 'flavor' on Instance uuid d0d109ac-f203-4b68-b973-32d868d8270f {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1259.332753] env[61852]: DEBUG nova.scheduler.client.report [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1259.837752] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61852) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1259.837752] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.119s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1260.158904] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0299b4d8-5974-46e8-977d-1a77618bf700 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquiring lock "d0d109ac-f203-4b68-b973-32d868d8270f" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1260.159388] env[61852]: DEBUG oslo_concurrency.lockutils [None req-558f29a6-5224-4ca9-bbfa-58a48fa67d9f tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d0d109ac-f203-4b68-b973-32d868d8270f" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.232s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1260.160434] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0299b4d8-5974-46e8-977d-1a77618bf700 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d0d109ac-f203-4b68-b973-32d868d8270f" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.002s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1260.664023] env[61852]: INFO nova.compute.manager [None req-0299b4d8-5974-46e8-977d-1a77618bf700 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Detaching volume 2b1a71df-a087-41b9-8749-50de3628dac0 [ 1260.696207] env[61852]: INFO nova.virt.block_device [None req-0299b4d8-5974-46e8-977d-1a77618bf700 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Attempting to driver detach volume 2b1a71df-a087-41b9-8749-50de3628dac0 from mountpoint /dev/sdc [ 1260.696482] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-0299b4d8-5974-46e8-977d-1a77618bf700 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Volume detach. 
Driver type: vmdk {{(pid=61852) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1260.696678] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-0299b4d8-5974-46e8-977d-1a77618bf700 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-277444', 'volume_id': '2b1a71df-a087-41b9-8749-50de3628dac0', 'name': 'volume-2b1a71df-a087-41b9-8749-50de3628dac0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd0d109ac-f203-4b68-b973-32d868d8270f', 'attached_at': '', 'detached_at': '', 'volume_id': '2b1a71df-a087-41b9-8749-50de3628dac0', 'serial': '2b1a71df-a087-41b9-8749-50de3628dac0'} {{(pid=61852) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1260.697590] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a9601e2-6035-4e32-88b3-35eaf2b49a1c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.719174] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6935dec8-479f-419a-a815-5b78368cf6cd {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.725777] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-357aa11d-5ddd-4217-8f8b-75d0824934d5 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.744927] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deeba74f-6574-4891-b15e-c51965934d36 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.759123] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-0299b4d8-5974-46e8-977d-1a77618bf700 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] The volume has not been displaced from its original location: [datastore1] volume-2b1a71df-a087-41b9-8749-50de3628dac0/volume-2b1a71df-a087-41b9-8749-50de3628dac0.vmdk. No consolidation needed. 
{{(pid=61852) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1260.764562] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-0299b4d8-5974-46e8-977d-1a77618bf700 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Reconfiguring VM instance instance-0000006b to detach disk 2002 {{(pid=61852) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1260.764832] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4bf4e0ef-a530-424f-a20e-4e229848f421 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.781219] env[61852]: DEBUG oslo_vmware.api [None req-0299b4d8-5974-46e8-977d-1a77618bf700 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1260.781219] env[61852]: value = "task-1293492" [ 1260.781219] env[61852]: _type = "Task" [ 1260.781219] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.788290] env[61852]: DEBUG oslo_vmware.api [None req-0299b4d8-5974-46e8-977d-1a77618bf700 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293492, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.832940] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1260.833175] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1260.833326] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Starting heal instance info cache {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1260.833447] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Rebuilding the list of instances to heal {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1261.291258] env[61852]: DEBUG oslo_vmware.api [None req-0299b4d8-5974-46e8-977d-1a77618bf700 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293492, 'name': ReconfigVM_Task, 'duration_secs': 0.205471} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.291641] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-0299b4d8-5974-46e8-977d-1a77618bf700 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Reconfigured VM instance instance-0000006b to detach disk 2002 {{(pid=61852) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1261.296046] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-036cf332-5ebd-44cb-8997-e8a0b69abc8a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.311014] env[61852]: DEBUG oslo_vmware.api [None req-0299b4d8-5974-46e8-977d-1a77618bf700 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1261.311014] env[61852]: value = "task-1293493" [ 1261.311014] env[61852]: _type = "Task" [ 1261.311014] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.320114] env[61852]: DEBUG oslo_vmware.api [None req-0299b4d8-5974-46e8-977d-1a77618bf700 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293493, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.377219] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "refresh_cache-d0d109ac-f203-4b68-b973-32d868d8270f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1261.377401] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquired lock "refresh_cache-d0d109ac-f203-4b68-b973-32d868d8270f" {{(pid=61852) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1261.377554] env[61852]: DEBUG nova.network.neutron [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Forcefully refreshing network info cache for instance {{(pid=61852) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1261.377711] env[61852]: DEBUG nova.objects.instance [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lazy-loading 'info_cache' on Instance uuid d0d109ac-f203-4b68-b973-32d868d8270f {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1261.820710] env[61852]: DEBUG oslo_vmware.api [None req-0299b4d8-5974-46e8-977d-1a77618bf700 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293493, 'name': ReconfigVM_Task, 'duration_secs': 0.128771} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.821022] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-0299b4d8-5974-46e8-977d-1a77618bf700 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-277444', 'volume_id': '2b1a71df-a087-41b9-8749-50de3628dac0', 'name': 'volume-2b1a71df-a087-41b9-8749-50de3628dac0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd0d109ac-f203-4b68-b973-32d868d8270f', 'attached_at': '', 'detached_at': '', 'volume_id': '2b1a71df-a087-41b9-8749-50de3628dac0', 'serial': '2b1a71df-a087-41b9-8749-50de3628dac0'} {{(pid=61852) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1262.363427] env[61852]: DEBUG nova.objects.instance [None req-0299b4d8-5974-46e8-977d-1a77618bf700 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lazy-loading 'flavor' on Instance uuid d0d109ac-f203-4b68-b973-32d868d8270f {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1263.093537] env[61852]: DEBUG nova.network.neutron [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Updating instance_info_cache with network_info: [{"id": "2f4cf9a5-1bbd-483d-b171-f31ccc69f1a9", "address": "fa:16:3e:59:80:45", "network": {"id": "07c444d7-03d4-406b-bb66-de44a92b43d6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-818710190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.217", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a10be4b0f16c432c87b39b211fbf2fee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0c293d47-74c0-49d7-a474-cdb643080f6f", "external-id": "nsx-vlan-transportzone-172", "segmentation_id": 172, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f4cf9a5-1b", "ovs_interfaceid": "2f4cf9a5-1bbd-483d-b171-f31ccc69f1a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1263.369988] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0299b4d8-5974-46e8-977d-1a77618bf700 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d0d109ac-f203-4b68-b973-32d868d8270f" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.209s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1263.595763] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Releasing lock "refresh_cache-d0d109ac-f203-4b68-b973-32d868d8270f" {{(pid=61852) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1263.595929] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Updated the network info_cache for instance {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1263.596158] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1263.596327] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1263.596476] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1264.524442] env[61852]: DEBUG oslo_concurrency.lockutils [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquiring lock "d0d109ac-f203-4b68-b973-32d868d8270f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1264.524804] env[61852]: DEBUG oslo_concurrency.lockutils [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d0d109ac-f203-4b68-b973-32d868d8270f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1264.525071] env[61852]: DEBUG oslo_concurrency.lockutils [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquiring lock "d0d109ac-f203-4b68-b973-32d868d8270f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1264.525279] env[61852]: DEBUG oslo_concurrency.lockutils [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d0d109ac-f203-4b68-b973-32d868d8270f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1264.525458] env[61852]: DEBUG oslo_concurrency.lockutils [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d0d109ac-f203-4b68-b973-32d868d8270f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1264.527491] env[61852]: INFO nova.compute.manager [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Terminating instance [ 1264.529200] env[61852]: DEBUG nova.compute.manager [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1264.529394] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1264.530289] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4f07da3-04ca-4900-9218-259a755f706f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.537914] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1264.538148] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a14608e9-7781-43c3-afc0-b71db4c752eb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.543739] env[61852]: DEBUG oslo_vmware.api [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1264.543739] env[61852]: value = "task-1293494" [ 1264.543739] env[61852]: _type = "Task" [ 1264.543739] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.551008] env[61852]: DEBUG oslo_vmware.api [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293494, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.053711] env[61852]: DEBUG oslo_vmware.api [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293494, 'name': PowerOffVM_Task, 'duration_secs': 0.168125} completed successfully. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.054335] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1265.054335] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1265.054495] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-77043efb-88c6-4726-a92a-70b710fcc3a8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.111527] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1265.111742] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1265.111931] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Deleting the datastore file [datastore1] d0d109ac-f203-4b68-b973-32d868d8270f {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1265.112206] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f164b64a-972b-424a-b4b8-899fbbad999c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.117700] env[61852]: DEBUG oslo_vmware.api [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for the task: (returnval){ [ 1265.117700] env[61852]: value = "task-1293496" [ 1265.117700] env[61852]: _type = "Task" [ 1265.117700] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.125874] env[61852]: DEBUG oslo_vmware.api [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293496, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.627331] env[61852]: DEBUG oslo_vmware.api [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Task: {'id': task-1293496, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127536} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.627753] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1265.627753] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1265.627930] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1265.628128] env[61852]: INFO nova.compute.manager [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1265.628389] env[61852]: DEBUG oslo.service.loopingcall [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1265.628603] env[61852]: DEBUG nova.compute.manager [-] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1265.628701] env[61852]: DEBUG nova.network.neutron [-] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1266.062949] env[61852]: DEBUG nova.compute.manager [req-b087a507-01c7-4a59-acad-a80ff16789da req-06c42684-957a-473f-88f0-3e23c4a59e8e service nova] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Received event network-vif-deleted-2f4cf9a5-1bbd-483d-b171-f31ccc69f1a9 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1266.063188] env[61852]: INFO nova.compute.manager [req-b087a507-01c7-4a59-acad-a80ff16789da req-06c42684-957a-473f-88f0-3e23c4a59e8e service nova] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Neutron deleted interface 2f4cf9a5-1bbd-483d-b171-f31ccc69f1a9; detaching it from the instance and deleting it from the info cache [ 1266.063188] env[61852]: DEBUG nova.network.neutron [req-b087a507-01c7-4a59-acad-a80ff16789da req-06c42684-957a-473f-88f0-3e23c4a59e8e service nova] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1266.541746] env[61852]: DEBUG nova.network.neutron [-] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1266.565556] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b18bf71b-93ec-46e8-83ad-de2417fa1c65 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.575383] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5606a836-ad3b-4b5d-91a9-fd384676d97d {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.598305] env[61852]: DEBUG nova.compute.manager [req-b087a507-01c7-4a59-acad-a80ff16789da req-06c42684-957a-473f-88f0-3e23c4a59e8e service nova] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Detach interface failed, port_id=2f4cf9a5-1bbd-483d-b171-f31ccc69f1a9, reason: Instance d0d109ac-f203-4b68-b973-32d868d8270f could not be found. {{(pid=61852) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1267.044997] env[61852]: INFO nova.compute.manager [-] [instance: d0d109ac-f203-4b68-b973-32d868d8270f] Took 1.42 seconds to deallocate network for instance. 
[ 1267.551436] env[61852]: DEBUG oslo_concurrency.lockutils [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1267.551695] env[61852]: DEBUG oslo_concurrency.lockutils [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1267.551911] env[61852]: DEBUG nova.objects.instance [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lazy-loading 'resources' on Instance uuid d0d109ac-f203-4b68-b973-32d868d8270f {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1268.096020] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82ed53d1-9674-4973-a6d9-0921c4ff0c14 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.103229] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b701b2-5495-4591-b735-24bc187264e2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.133528] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a36e4ffe-2fe0-4fdf-bd34-64753a61dabb {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.140632] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e01dea52-1381-4b7e-b120-528ac393c056 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.153359] env[61852]: DEBUG nova.compute.provider_tree [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1268.656950] env[61852]: DEBUG nova.scheduler.client.report [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1269.162029] env[61852]: DEBUG oslo_concurrency.lockutils [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 
tempest-AttachVolumeTestJSON-391606047-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.610s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1269.183025] env[61852]: INFO nova.scheduler.client.report [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Deleted allocations for instance d0d109ac-f203-4b68-b973-32d868d8270f [ 1269.691679] env[61852]: DEBUG oslo_concurrency.lockutils [None req-41c32d77-f641-4d9a-b712-0d8ad8239857 tempest-AttachVolumeTestJSON-391606047 tempest-AttachVolumeTestJSON-391606047-project-member] Lock "d0d109ac-f203-4b68-b973-32d868d8270f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.167s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1293.472027] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0514896a-15d0-4e58-8db9-ef50d384ba6c tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "3493166e-5559-4eb6-a53c-4348d2b46aa0" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1293.472027] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0514896a-15d0-4e58-8db9-ef50d384ba6c tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "3493166e-5559-4eb6-a53c-4348d2b46aa0" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1293.974502] env[61852]: DEBUG nova.compute.utils [None req-0514896a-15d0-4e58-8db9-ef50d384ba6c tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Using /dev/sd instead of None {{(pid=61852) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1294.477397] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0514896a-15d0-4e58-8db9-ef50d384ba6c tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "3493166e-5559-4eb6-a53c-4348d2b46aa0" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1295.536284] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0514896a-15d0-4e58-8db9-ef50d384ba6c tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "3493166e-5559-4eb6-a53c-4348d2b46aa0" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1295.536723] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0514896a-15d0-4e58-8db9-ef50d384ba6c tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "3493166e-5559-4eb6-a53c-4348d2b46aa0" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=61852) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1295.536803] env[61852]: INFO nova.compute.manager [None req-0514896a-15d0-4e58-8db9-ef50d384ba6c tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Attaching volume 6169555d-4662-4255-9290-e6c5b41b4c20 to /dev/sdb [ 1295.566390] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5cf590c-ca0d-4894-b70d-95590a0cfb47 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.573680] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6eae781-a1d4-4de3-900f-c7b8c905bfd2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.586136] env[61852]: DEBUG nova.virt.block_device [None req-0514896a-15d0-4e58-8db9-ef50d384ba6c tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Updating existing volume attachment record: 2be77aa4-7fab-4696-a582-e71c4003955e {{(pid=61852) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1300.127974] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-0514896a-15d0-4e58-8db9-ef50d384ba6c tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Volume attach. Driver type: vmdk {{(pid=61852) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1300.128310] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-0514896a-15d0-4e58-8db9-ef50d384ba6c tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-277446', 'volume_id': '6169555d-4662-4255-9290-e6c5b41b4c20', 'name': 'volume-6169555d-4662-4255-9290-e6c5b41b4c20', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3493166e-5559-4eb6-a53c-4348d2b46aa0', 'attached_at': '', 'detached_at': '', 'volume_id': '6169555d-4662-4255-9290-e6c5b41b4c20', 'serial': '6169555d-4662-4255-9290-e6c5b41b4c20'} {{(pid=61852) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1300.129173] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75c0472a-4a8b-4f58-8832-ef1e09be80a4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.145590] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-088510a8-19db-455b-8644-7e068ba30e11 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.168493] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-0514896a-15d0-4e58-8db9-ef50d384ba6c tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] 
volume-6169555d-4662-4255-9290-e6c5b41b4c20/volume-6169555d-4662-4255-9290-e6c5b41b4c20.vmdk or device None with type thin {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1300.168727] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5f334787-4f00-49c4-8217-0672717fff59 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.185566] env[61852]: DEBUG oslo_vmware.api [None req-0514896a-15d0-4e58-8db9-ef50d384ba6c tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1300.185566] env[61852]: value = "task-1293502" [ 1300.185566] env[61852]: _type = "Task" [ 1300.185566] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.193090] env[61852]: DEBUG oslo_vmware.api [None req-0514896a-15d0-4e58-8db9-ef50d384ba6c tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293502, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.696362] env[61852]: DEBUG oslo_vmware.api [None req-0514896a-15d0-4e58-8db9-ef50d384ba6c tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293502, 'name': ReconfigVM_Task, 'duration_secs': 0.34827} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.696629] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-0514896a-15d0-4e58-8db9-ef50d384ba6c tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Reconfigured VM instance instance-0000006c to attach disk [datastore1] volume-6169555d-4662-4255-9290-e6c5b41b4c20/volume-6169555d-4662-4255-9290-e6c5b41b4c20.vmdk or device None with type thin {{(pid=61852) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1300.701294] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c48409e3-c320-4fc9-96fc-3a2078518fa4 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.715717] env[61852]: DEBUG oslo_vmware.api [None req-0514896a-15d0-4e58-8db9-ef50d384ba6c tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1300.715717] env[61852]: value = "task-1293503" [ 1300.715717] env[61852]: _type = "Task" [ 1300.715717] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.726019] env[61852]: DEBUG oslo_vmware.api [None req-0514896a-15d0-4e58-8db9-ef50d384ba6c tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293503, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.225695] env[61852]: DEBUG oslo_vmware.api [None req-0514896a-15d0-4e58-8db9-ef50d384ba6c tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293503, 'name': ReconfigVM_Task, 'duration_secs': 0.183128} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.225998] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-0514896a-15d0-4e58-8db9-ef50d384ba6c tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-277446', 'volume_id': '6169555d-4662-4255-9290-e6c5b41b4c20', 'name': 'volume-6169555d-4662-4255-9290-e6c5b41b4c20', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3493166e-5559-4eb6-a53c-4348d2b46aa0', 'attached_at': '', 'detached_at': '', 'volume_id': '6169555d-4662-4255-9290-e6c5b41b4c20', 'serial': '6169555d-4662-4255-9290-e6c5b41b4c20'} {{(pid=61852) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1302.261015] env[61852]: DEBUG nova.objects.instance [None req-0514896a-15d0-4e58-8db9-ef50d384ba6c tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lazy-loading 'flavor' on Instance uuid 3493166e-5559-4eb6-a53c-4348d2b46aa0 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1302.767548] env[61852]: DEBUG oslo_concurrency.lockutils [None req-0514896a-15d0-4e58-8db9-ef50d384ba6c tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "3493166e-5559-4eb6-a53c-4348d2b46aa0" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.231s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1302.944901] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1b9c651f-c29e-447f-ba5d-53fe0bbfbb59 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "3493166e-5559-4eb6-a53c-4348d2b46aa0" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1302.945185] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1b9c651f-c29e-447f-ba5d-53fe0bbfbb59 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "3493166e-5559-4eb6-a53c-4348d2b46aa0" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1303.448022] env[61852]: INFO nova.compute.manager [None req-1b9c651f-c29e-447f-ba5d-53fe0bbfbb59 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Detaching volume 6169555d-4662-4255-9290-e6c5b41b4c20 [ 1303.477783] env[61852]: INFO nova.virt.block_device [None req-1b9c651f-c29e-447f-ba5d-53fe0bbfbb59 tempest-AttachVolumeNegativeTest-1946418435 
tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Attempting to driver detach volume 6169555d-4662-4255-9290-e6c5b41b4c20 from mountpoint /dev/sdb [ 1303.478037] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b9c651f-c29e-447f-ba5d-53fe0bbfbb59 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Volume detach. Driver type: vmdk {{(pid=61852) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1303.478236] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b9c651f-c29e-447f-ba5d-53fe0bbfbb59 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-277446', 'volume_id': '6169555d-4662-4255-9290-e6c5b41b4c20', 'name': 'volume-6169555d-4662-4255-9290-e6c5b41b4c20', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3493166e-5559-4eb6-a53c-4348d2b46aa0', 'attached_at': '', 'detached_at': '', 'volume_id': '6169555d-4662-4255-9290-e6c5b41b4c20', 'serial': '6169555d-4662-4255-9290-e6c5b41b4c20'} {{(pid=61852) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1303.479141] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fde5b73-0cd3-4b46-9670-43c4e761c14f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.499761] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ca4748-0159-47f1-9ebe-454afd48470c {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.506162] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f00ecea0-2774-428f-a1ec-255fbbf4a9e2 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.525297] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1783d4da-e6c0-45eb-8412-afae20143026 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.539146] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b9c651f-c29e-447f-ba5d-53fe0bbfbb59 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] The volume has not been displaced from its original location: [datastore1] volume-6169555d-4662-4255-9290-e6c5b41b4c20/volume-6169555d-4662-4255-9290-e6c5b41b4c20.vmdk. No consolidation needed. 
{{(pid=61852) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1303.544337] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b9c651f-c29e-447f-ba5d-53fe0bbfbb59 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Reconfiguring VM instance instance-0000006c to detach disk 2001 {{(pid=61852) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1303.544602] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6542038-ed71-4e0d-ac74-4328218d863a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.561633] env[61852]: DEBUG oslo_vmware.api [None req-1b9c651f-c29e-447f-ba5d-53fe0bbfbb59 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1303.561633] env[61852]: value = "task-1293504" [ 1303.561633] env[61852]: _type = "Task" [ 1303.561633] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.570035] env[61852]: DEBUG oslo_vmware.api [None req-1b9c651f-c29e-447f-ba5d-53fe0bbfbb59 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293504, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.071079] env[61852]: DEBUG oslo_vmware.api [None req-1b9c651f-c29e-447f-ba5d-53fe0bbfbb59 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293504, 'name': ReconfigVM_Task, 'duration_secs': 0.188984} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.071360] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b9c651f-c29e-447f-ba5d-53fe0bbfbb59 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Reconfigured VM instance instance-0000006c to detach disk 2001 {{(pid=61852) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1304.075825] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d39e9c98-4a25-406a-b127-9662060c903f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.090071] env[61852]: DEBUG oslo_vmware.api [None req-1b9c651f-c29e-447f-ba5d-53fe0bbfbb59 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){ [ 1304.090071] env[61852]: value = "task-1293505" [ 1304.090071] env[61852]: _type = "Task" [ 1304.090071] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.097691] env[61852]: DEBUG oslo_vmware.api [None req-1b9c651f-c29e-447f-ba5d-53fe0bbfbb59 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293505, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.599912] env[61852]: DEBUG oslo_vmware.api [None req-1b9c651f-c29e-447f-ba5d-53fe0bbfbb59 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293505, 'name': ReconfigVM_Task, 'duration_secs': 0.140602} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.600293] env[61852]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b9c651f-c29e-447f-ba5d-53fe0bbfbb59 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-277446', 'volume_id': '6169555d-4662-4255-9290-e6c5b41b4c20', 'name': 'volume-6169555d-4662-4255-9290-e6c5b41b4c20', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3493166e-5559-4eb6-a53c-4348d2b46aa0', 'attached_at': '', 'detached_at': '', 'volume_id': '6169555d-4662-4255-9290-e6c5b41b4c20', 'serial': '6169555d-4662-4255-9290-e6c5b41b4c20'} {{(pid=61852) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1305.140297] env[61852]: DEBUG nova.objects.instance [None req-1b9c651f-c29e-447f-ba5d-53fe0bbfbb59 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lazy-loading 'flavor' on Instance uuid 3493166e-5559-4eb6-a53c-4348d2b46aa0 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1306.148167] env[61852]: DEBUG oslo_concurrency.lockutils [None req-1b9c651f-c29e-447f-ba5d-53fe0bbfbb59 tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "3493166e-5559-4eb6-a53c-4348d2b46aa0" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.203s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1307.176372] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "3493166e-5559-4eb6-a53c-4348d2b46aa0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1307.176788] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "3493166e-5559-4eb6-a53c-4348d2b46aa0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1307.176872] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "3493166e-5559-4eb6-a53c-4348d2b46aa0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61852) inner 
[ 1307.177072] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "3493166e-5559-4eb6-a53c-4348d2b46aa0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1307.177253] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "3493166e-5559-4eb6-a53c-4348d2b46aa0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1307.179429] env[61852]: INFO nova.compute.manager [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Terminating instance
[ 1307.181227] env[61852]: DEBUG nova.compute.manager [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Start destroying the instance on the hypervisor. {{(pid=61852) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 1307.181325] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Destroying instance {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 1307.182139] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66e1357b-5c12-48b9-a4fd-48e82779894b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1307.190017] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Powering off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 1307.190240] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-029b6440-860a-4858-a2b9-a0dcff269430 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1307.196311] env[61852]: DEBUG oslo_vmware.api [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){
[ 1307.196311] env[61852]: value = "task-1293506"
[ 1307.196311] env[61852]: _type = "Task"
[ 1307.196311] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
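Power-off is the first step of the destroy sequence and reuses the same task pattern as the detach: vm_util issues PowerOffVM_Task against the VM and waits for it. A sketch, assuming the session and vm_ref placeholders from the earlier sketch:

    # Sketch of vm_util.power_off_instance's core call; session and vm_ref
    # are the placeholders introduced in the earlier sketch, not real refs.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)   # raises on task error, returns on success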
[ 1307.203563] env[61852]: DEBUG oslo_vmware.api [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293506, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1307.706563] env[61852]: DEBUG oslo_vmware.api [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293506, 'name': PowerOffVM_Task, 'duration_secs': 0.200016} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1307.706826] env[61852]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Powered off the VM {{(pid=61852) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 1307.707013] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Unregistering the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 1307.707266] env[61852]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0d629747-ef5c-4bb4-a5ac-786fb8518d76 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1307.765630] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Unregistered the VM {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 1307.765843] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Deleting contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 1307.766045] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Deleting the datastore file [datastore1] 3493166e-5559-4eb6-a53c-4348d2b46aa0 {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1307.766320] env[61852]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c213af91-ebf3-4eab-92f0-e23b477dc419 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1307.772202] env[61852]: DEBUG oslo_vmware.api [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for the task: (returnval){
[ 1307.772202] env[61852]: value = "task-1293508"
[ 1307.772202] env[61852]: _type = "Task"
[ 1307.772202] env[61852]: } to complete. {{(pid=61852) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
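After power-off the driver unregisters the VM (UnregisterVM is a plain method, not a task) and then removes the instance directory with FileManager.DeleteDatastoreFile_Task. A sketch of those two calls, again reusing the placeholder session; datacenter_ref is an assumed datacenter managed-object reference, and the path mirrors the "[datastore1] <uuid>" form logged above:

    # Sketch of _destroy_instance's cleanup calls; session, vm_ref and
    # datacenter_ref are placeholders carried over from the earlier sketches.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore1] 3493166e-5559-4eb6-a53c-4348d2b46aa0',
        datacenter=datacenter_ref)
    session.wait_for_task(task)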
[ 1307.779888] env[61852]: DEBUG oslo_vmware.api [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293508, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1308.281675] env[61852]: DEBUG oslo_vmware.api [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Task: {'id': task-1293508, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149528} completed successfully. {{(pid=61852) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1308.282067] env[61852]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Deleted the datastore file {{(pid=61852) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1308.282123] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Deleted contents of the VM from datastore datastore1 {{(pid=61852) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 1308.282284] env[61852]: DEBUG nova.virt.vmwareapi.vmops [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Instance destroyed {{(pid=61852) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1308.282467] env[61852]: INFO nova.compute.manager [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Took 1.10 seconds to destroy the instance on the hypervisor.
[ 1308.282707] env[61852]: DEBUG oslo.service.loopingcall [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61852) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
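The "Waiting for function ... to return" line is oslo.service's looping-call machinery: network deallocation is wrapped in a retry loop that keeps invoking the helper until it signals completion. A minimal sketch of that mechanism using the fixed-interval variant for simplicity; the retried function here is hypothetical, not Nova's actual helper:

    # Sketch of the oslo.service looping-call pattern behind
    # "_deallocate_network_with_retries"; the retried function is hypothetical.
    from oslo_service import loopingcall

    attempts = {'n': 0}

    def _deallocate_with_retries():
        attempts['n'] += 1
        if attempts['n'] < 3:          # pretend the first two calls fail
            return                     # loop again after the interval
        raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    result = timer.start(interval=0.1).wait()   # blocks until LoopingCallDone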
[ 1308.282900] env[61852]: DEBUG nova.compute.manager [-] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Deallocating network for instance {{(pid=61852) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 1308.282996] env[61852]: DEBUG nova.network.neutron [-] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] deallocate_for_instance() {{(pid=61852) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1308.725342] env[61852]: DEBUG nova.compute.manager [req-3e728c5e-5e5e-4340-87d5-71648bf24f8e req-ab596592-27f8-4db6-b5c1-df7c4c46195e service nova] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Received event network-vif-deleted-3f9ce73a-5444-428a-a963-f389921fbf99 {{(pid=61852) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1308.725545] env[61852]: INFO nova.compute.manager [req-3e728c5e-5e5e-4340-87d5-71648bf24f8e req-ab596592-27f8-4db6-b5c1-df7c4c46195e service nova] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Neutron deleted interface 3f9ce73a-5444-428a-a963-f389921fbf99; detaching it from the instance and deleting it from the info cache
[ 1308.725673] env[61852]: DEBUG nova.network.neutron [req-3e728c5e-5e5e-4340-87d5-71648bf24f8e req-ab596592-27f8-4db6-b5c1-df7c4c46195e service nova] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1309.204392] env[61852]: DEBUG nova.network.neutron [-] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Updating instance_info_cache with network_info: [] {{(pid=61852) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1309.228354] env[61852]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4924b313-8dcb-463b-a506-e81b4f1b4764 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1309.238452] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75b363f9-386f-4c58-bf6f-af0aea977fac {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1309.261093] env[61852]: DEBUG nova.compute.manager [req-3e728c5e-5e5e-4340-87d5-71648bf24f8e req-ab596592-27f8-4db6-b5c1-df7c4c46195e service nova] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Detach interface failed, port_id=3f9ce73a-5444-428a-a963-f389921fbf99, reason: Instance 3493166e-5559-4eb6-a53c-4348d2b46aa0 could not be found. {{(pid=61852) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}}
[ 1309.708273] env[61852]: INFO nova.compute.manager [-] [instance: 3493166e-5559-4eb6-a53c-4348d2b46aa0] Took 1.42 seconds to deallocate network for instance.
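Deallocation asks Neutron to remove the instance's ports; here Neutron had already deleted the interface, so the event handler just drops it from the info cache and the later detach fails harmlessly with "could not be found". A sketch of the underlying port cleanup using python-neutronclient; the authenticated client construction is assumed, and the device_id filter mirrors how ports are tied to an instance:

    # Sketch of port cleanup by device_id with python-neutronclient; `neutron`
    # is assumed to be an authenticated Client instance (auth wiring omitted).
    from neutronclient.common import exceptions as neutron_exc

    def delete_instance_ports(neutron, instance_uuid):
        ports = neutron.list_ports(device_id=instance_uuid)['ports']
        for port in ports:
            try:
                neutron.delete_port(port['id'])
            except neutron_exc.PortNotFoundClient:
                # Already gone (as in the log above); nothing left to do.
                pass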
[ 1310.216970] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1310.217280] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1310.217514] env[61852]: DEBUG nova.objects.instance [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lazy-loading 'resources' on Instance uuid 3493166e-5559-4eb6-a53c-4348d2b46aa0 {{(pid=61852) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1310.752718] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-840738fb-fb20-4c92-b344-3f668ff21b6e {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1310.759992] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a416f77-963a-4ca6-b0a0-6d034223e72f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1310.789830] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9801233e-8361-4088-84b1-e63b7ce57ca8 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1310.796266] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b8517f2-0d67-4502-8a85-92c39491b3d9 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1310.808891] env[61852]: DEBUG nova.compute.provider_tree [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1311.312360] env[61852]: DEBUG nova.scheduler.client.report [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1311.817827] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.600s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
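The inventory dict reported to Placement encodes effective capacity as (total - reserved) * allocation_ratio: with the values above, VCPU capacity is (48 - 0) * 4.0 = 192, memory is (196590 - 512) * 1.0 = 196078 MB, and disk is (400 - 0) * 1.0 = 400 GB. A small worked example over the same payload:

    # Effective-capacity arithmetic for the inventory payload logged above.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    def capacity(inv):
        return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

    for rc, inv in inventory.items():
        print(rc, capacity(inv))   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0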
[ 1311.838882] env[61852]: INFO nova.scheduler.client.report [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Deleted allocations for instance 3493166e-5559-4eb6-a53c-4348d2b46aa0
[ 1312.346949] env[61852]: DEBUG oslo_concurrency.lockutils [None req-ccc6ea2a-1614-41f5-bd16-dcedd0cb561d tempest-AttachVolumeNegativeTest-1946418435 tempest-AttachVolumeNegativeTest-1946418435-project-member] Lock "3493166e-5559-4eb6-a53c-4348d2b46aa0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.170s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1316.157488] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1317.158031] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1317.660449] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1317.660692] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1317.660875] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1317.661044] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61852) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1317.661952] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf5fc11-bf85-430d-a844-62a59085c748 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1317.670112] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ca8de1a-33c8-415c-a8fa-b311c39ace88 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
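"Deleted allocations for instance" corresponds to a DELETE against Placement's /allocations/{consumer_uuid} endpoint, keyed by the instance UUID. A sketch of that call with keystoneauth1; the auth URL, credentials, and the specific microversion header are assumptions for illustration:

    # Sketch of the Placement call behind "Deleted allocations for instance";
    # auth_url and credentials are placeholders, not values from this log.
    from keystoneauth1 import adapter, session as ks_session
    from keystoneauth1.identity import v3

    auth = v3.Password(auth_url='http://keystone.example.org/v3',
                       username='nova', password='secret',
                       project_name='service',
                       user_domain_id='default', project_domain_id='default')
    sess = ks_session.Session(auth=auth)
    placement = adapter.Adapter(session=sess, service_type='placement')

    # Placement microversions are selected per request via this header.
    placement.delete('/allocations/3493166e-5559-4eb6-a53c-4348d2b46aa0',
                     headers={'OpenStack-API-Version': 'placement 1.28'})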
[ 1317.683861] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8239f0a-1340-4160-93b5-88d2723d7735 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1317.689877] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c3108cc-7a7f-404d-9389-7cc1138cff28 {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1317.722646] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181307MB free_disk=139GB free_vcpus=48 pci_devices=None {{(pid=61852) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1317.722816] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1317.722999] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1318.742169] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61852) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1318.742427] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61852) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1318.754829] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6330cab4-2b75-4354-8b80-39d1f412c05f {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1318.762110] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49007fd0-395f-47ad-b607-ffea51a9924a {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1318.790428] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-238a4895-2d17-45b6-a642-ede916b2d25b {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1318.796835] env[61852]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99f32f56-7871-47c9-a7f5-d071259a92ed {{(pid=61852) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1318.809234] env[61852]: DEBUG nova.compute.provider_tree [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Inventory has not changed in ProviderTree for provider: f818062c-7b17-4bd0-94af-192a674543c3 {{(pid=61852) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
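The audit combines two views: what the hypervisor reports as free (181307 MB RAM, 48 vCPUs) and Nova's own accounting, where used_ram with no instances on the node is just the 512 MB reserved and used vCPUs are 0. A simplified sketch of that final-view arithmetic, with the instance list reduced to an empty list to match this node:

    # Final-resource-view arithmetic with the numbers logged above; instance
    # accounting is simplified to an empty list, matching this idle node.
    phys_ram_mb, reserved_ram_mb = 196590, 512
    total_vcpus = 48
    instances = []   # nothing is running here

    used_ram_mb = reserved_ram_mb + sum(i['memory_mb'] for i in instances)
    used_vcpus = sum(i['vcpus'] for i in instances)
    print(f'phys_ram={phys_ram_mb}MB used_ram={used_ram_mb}MB '
          f'total_vcpus={total_vcpus} used_vcpus={used_vcpus}')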
[ 1319.312537] env[61852]: DEBUG nova.scheduler.client.report [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Inventory has not changed for provider f818062c-7b17-4bd0-94af-192a674543c3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61852) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1319.818353] env[61852]: DEBUG nova.compute.resource_tracker [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61852) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1319.818732] env[61852]: DEBUG oslo_concurrency.lockutils [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.096s {{(pid=61852) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1320.818260] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1320.819103] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1320.819103] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1320.819103] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1320.819103] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61852) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}}
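The "skipping..." message above is simple config gating: _reclaim_queued_deletes returns early unless reclaim_instance_interval is positive, and deferred delete is disabled by default. A sketch of that check with oslo.config; the option name and default mirror Nova's, while the task body is a placeholder:

    # Sketch of the CONF.reclaim_instance_interval gate; option name and
    # default mirror Nova's, the reclaim body is a placeholder.
    from oslo_config import cfg

    CONF = cfg.CONF
    CONF.register_opts([cfg.IntOpt('reclaim_instance_interval', default=0)])

    def _reclaim_queued_deletes():
        if CONF.reclaim_instance_interval <= 0:
            print('CONF.reclaim_instance_interval <= 0, skipping...')
            return
        # ... reclaim SOFT_DELETED instances older than the interval ...

    _reclaim_queued_deletes()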
[ 1322.158107] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1322.158484] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Starting heal instance info cache {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}}
[ 1322.661549] env[61852]: DEBUG nova.compute.manager [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Didn't find any instances for network info cache update. {{(pid=61852) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}}
[ 1322.661878] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1324.157620] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1328.153287] env[61852]: DEBUG oslo_service.periodic_task [None req-17bff909-7b2d-4bb9-95c1-b68f12ee17b0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61852) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
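All of the "Running periodic task ComputeManager._poll_*" lines are driven by oslo.service's periodic-task framework: methods decorated with @periodic_task.periodic_task are collected on a PeriodicTasks subclass and dispatched by run_periodic_tasks on a timer. A minimal sketch of that wiring; the manager class and task body are stubs for illustration:

    # Minimal oslo.service periodic-task wiring, mirroring the
    # "Running periodic task ..." lines above; the task itself is a stub.
    from oslo_config import cfg
    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):
        @periodic_task.periodic_task(spacing=10)
        def _poll_volume_usage(self, context):
            pass   # stub; Nova would gather volume usage here

    mgr = Manager(cfg.CONF)
    mgr.run_periodic_tasks(context=None)   # normally called from a timer loop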